| branch_name (stringclasses, 15 values) | target (stringlengths 26–10.3M) | directory_id (stringlengths 40) | languages (sequencelengths 1–9) | num_files (int64, 1–1.47k) | repo_language (stringclasses, 34 values) | repo_name (stringlengths 6–91) | revision_id (stringlengths 40) | snapshot_id (stringlengths 40) | input (stringclasses, 1 value) |
|---|---|---|---|---|---|---|---|---|---|
refs/heads/master | <repo_name>rachelober/PSYU612<file_sep>/app/controllers/page_controller.rb
class PageController < ApplicationController
layout 'standard'
def index
if request.get?
if logged_in?
flash.now[:notice] = 'You have successfully logged off.'
end
logout
elsif request.post?
userid = params[:login][:userid]
if userid.empty?
flash.now[:warning] = 'Please fill in the userid that will be tested.'
else
@user = User.find(:first, :conditions => ['id = ?', userid])
session[:user_id] = @user.id
redirect_to :action => 'guidelines'
end
end
end
def guidelines
@user = logged_in_user
if request.post?
redirect_to :controller => 'article', :action => 'article1'
end
end
def summary
@user = logged_in_user
if request.post?
redirect_to :controller => 'page', :action => 'index'
end
end
end
<file_sep>/app/controllers/application.rb
# Filters added to this controller apply to all controllers in the application.
# Likewise, all the methods added will be available for all controllers.
class ApplicationController < ActionController::Base
# Pick a unique cookie name to distinguish our session data from others'
session :session_key => '_psyu612_session_id'
#functions for logged-in sessions
def logged_in?
session[:user_id] != nil
end
def logged_in_user
User.find session[:user_id]
rescue ActiveRecord::RecordNotFound
nil
end
def logout
session[:user_id] = nil
end
end
<file_sep>/app/helpers/application_helper.rb
# Methods added to this helper will be available to all templates in the application.
module ApplicationHelper
FLASH_TYPES = [
:notice, # positive feedback (ie - action successful)
:message, # neutral (ie - reminders)
:warning # errors (ie - login failed)
]
def flash_messages
flash.keys.collect { |k|
content_tag("div", flash[k], :class => "flash_#{k}")}.join("\n")
end
end
<file_sep>/app/controllers/pause_controller.rb
class PauseController < ApplicationController
layout 'standard'
# Pause after reading first article
def pausea1
if request.post?
redirect_to :controller => 'question', :action => 'question1'
end
end
# Pause after reading second article
def pausea2
if request.post?
redirect_to :controller => 'question', :action => 'question2'
end
end
# Pause after reading third article
def pausea3
if request.post?
redirect_to :controller => 'question', :action => 'question3'
end
end
# Pause after answering first article questions
def pauseq1
if request.post?
redirect_to :controller => 'article', :action => 'article2'
end
end
# Pause after answering second article questions
def pauseq2
if request.post?
redirect_to :controller => 'article', :action => 'article3'
end
end
# Pause after answering third article questions
def pauseq3
if request.post?
redirect_to :controller => 'question', :action => 'general'
end
end
end
<file_sep>/app/controllers/article_controller.rb
class ArticleController < ApplicationController
layout 'standard'
def article1
@user = logged_in_user
@article = @user.article1
@article_width = @user.article1_width
if request.post?
redirect_to :controller => 'pause', :action => 'pausea1'
end
end
def article2
@user = logged_in_user
@article = @user.article2
@article_width = @user.article2_width
if request.post?
redirect_to :controller => 'pause', :action => 'pausea2'
end
end
def article3
@user = logged_in_user
@article = @user.article3
@article_width = @user.article3_width
if request.post?
redirect_to :controller => 'pause', :action => 'pausea3'
end
end
end
<file_sep>/app/controllers/result_controller.rb
class ResultController < ApplicationController
def index
@user_pages, @users = paginate :users, :per_page => 24, :order_by => 'id'
end
def calculate
@users = User.find(:all)
@users.each { |user|
article1 = user.article1
article2 = user.article2
article3 = user.article3
article1_width = user.article1_width
article2_width = user.article2_width
article3_width = user.article3_width
# NOTE: the original method broke off mid-expression after ":conditions =>" and never closed the block;
# looking the answers up by the user's id (as the question controllers do) is an assumed completion.
answers = Answer.find(:all, :conditions => ['id = ?', user.id])
}
end
end
<file_sep>/app/controllers/question_controller.rb
class QuestionController < ApplicationController
layout 'standard'
def question1
@user = logged_in_user
@article = @user.article1
@answer = Answer.find(:first, :conditions => ['id = ?', @user.id])
end
def submit1
@user = logged_in_user
@answer = Answer.find(:first, :conditions => ['id = ?', @user.id])
@answer.answer1_01 = params[:question][:answer_1]
@answer.answer1_02 = params[:question][:answer_2]
@answer.answer1_03 = params[:question][:answer_3]
@answer.answer1_04 = params[:question][:answer_4]
@answer.answer1_05 = params[:question][:answer_5]
@answer.answer1_06 = params[:question][:answer_6]
@answer.answer1_07 = params[:question][:answer_7]
@answer.answer1_08 = params[:question][:answer_8]
@answer.answer1_09 = params[:question][:answer_9]
@answer.answer1_10 = params[:question][:answer_10]
@answer.answer1_11 = params[:question][:answer_11]
@answer.answer1_12 = params[:question][:answer_12]
@answer.answer1_13 = params[:question][:answer_13]
@answer.answer1_14 = params[:question][:answer_14]
@answer.answer1_15 = params[:question][:answer_15]
@answer.save
redirect_to :controller => 'pause', :action => 'pauseq1'
end
def question2
@user = logged_in_user
@article = @user.article2
end
def submit2
@user = logged_in_user
@answer = Answer.find(:first, :conditions => ['id = ?', @user.id])
@answer.answer2_01 = params[:question][:answer_1]
@answer.answer2_02 = params[:question][:answer_2]
@answer.answer2_03 = params[:question][:answer_3]
@answer.answer2_04 = params[:question][:answer_4]
@answer.answer2_05 = params[:question][:answer_5]
@answer.answer2_06 = params[:question][:answer_6]
@answer.answer2_07 = params[:question][:answer_7]
@answer.answer2_08 = params[:question][:answer_8]
@answer.answer2_09 = params[:question][:answer_9]
@answer.answer2_10 = params[:question][:answer_10]
@answer.answer2_11 = params[:question][:answer_11]
@answer.answer2_12 = params[:question][:answer_12]
@answer.answer2_13 = params[:question][:answer_13]
@answer.answer2_14 = params[:question][:answer_14]
@answer.answer2_15 = params[:question][:answer_15]
@answer.save
redirect_to :controller => 'pause', :action => 'pauseq2'
end
def question3
@user = logged_in_user
@article = @user.article3
end
def submit3
@user = logged_in_user
@answer = Answer.find(:first, :conditions => ['id = ?', @user.id])
@answer.answer3_01 = params[:question][:answer_1]
@answer.answer3_02 = params[:question][:answer_2]
@answer.answer3_03 = params[:question][:answer_3]
@answer.answer3_04 = params[:question][:answer_4]
@answer.answer3_05 = params[:question][:answer_5]
@answer.answer3_06 = params[:question][:answer_6]
@answer.answer3_07 = params[:question][:answer_7]
@answer.answer3_08 = params[:question][:answer_8]
@answer.answer3_09 = params[:question][:answer_9]
@answer.answer3_10 = params[:question][:answer_10]
@answer.answer3_11 = params[:question][:answer_11]
@answer.answer3_12 = params[:question][:answer_12]
@answer.answer3_13 = params[:question][:answer_13]
@answer.answer3_14 = params[:question][:answer_14]
@answer.answer3_15 = params[:question][:answer_15]
@answer.save
redirect_to :controller => 'pause', :action => 'pauseq3'
end
def general
@user = logged_in_user
end
def general_submit
@user = logged_in_user
@answer = User.find(:first, :conditions => ['id = ?', @user.id])
@answer.age = params[:general][:age]
@answer.avgcompuse = params[:general][:avgcompuse]
@answer.comptime = params[:general][:comptime]
@answer.howoftennews = params[:general][:howoftennews]
@answer.onlinepaper = params[:general][:onlinepaper]
@answer.monitorsize = params[:general][:monitorsize]
@answer.resolution = params[:general][:resolution]
@answer.save
redirect_to :controller => 'page', :action => 'summary'
end
end
| b35c93e1d20878ca250d3f224ccc6c9a5973505f | [
"Ruby"
] | 7 | Ruby | rachelober/PSYU612 | a99f189ac91246995b9bb2716f86c653f7a58860 | 8ef7a6bccebf934de1a240dbc56e32447525e3b7 | |
refs/heads/master | <file_sep>'''
Created on Mar 18, 2015
@author: Wschive
'''
import MySQLdb
import ast
from data.App import App
from _ast import TryExcept
import json
def connect():
connect.db = MySQLdb.connect(host="", # your host, usually localhost
user="", # your username
passwd="", # your password
db="wilhelmw_thesis") # name of the data base
# you must create a Cursor object. It will let
# you execute all the queries you need
connect.cursor = connect.db.cursor()
connect.db.set_character_set('utf8')
connect.cursor.execute('SET NAMES utf8;')
connect.cursor.execute('SET CHARACTER SET utf8;')
connect.cursor.execute('SET character_set_connection=utf8;')
print "DB connected and cursor created"
def saveApp(app):
if not connect.db.open:
connect()
sql = """INSERT INTO `App`(`name`, `permissions`, `similar`, `logoUrl`, `packageName`) VALUES (%s,%s,%s,%s,%s)"""
if(app):
connect.cursor.execute(sql, (app.name,json.dumps(app.permissions),json.dumps(app.similar),app.logo, app.packageName))
connect.db.commit()
else:
print "element was missing from full App"
print "saved ", app.packageName
def getAll():
if not connect.db.open:
connect()
connect.cursor.execute("SELECT * FROM App")
# print all the first cell of all the rows
for row in connect.cursor.fetchall() :
print row[0]
def getApp(packageName):
if not connect.db.open:
connect()
sql = """SELECT * FROM App WHERE packageName =%s"""
connect.cursor.execute(sql,[packageName])
row = connect.cursor.fetchone()
realName = row[0]
permissions = ast.literal_eval(row[1])
similar = ast.literal_eval(row[2])
logo = row[3]
packageName = row[4]
app = App(realName, packageName, logo, similar, permissions)
return app
def getJSonApp(packageName):
app = getApp(packageName)
data = {}
data["permissions"] = app.permissions
data["logo"] = app.logo
data["name"] = app.name
data["packageName"] = app.packageName
return data
def doesExist(packageName):
if not connect.db.open:
connect()
#returns 1 if found, 0 if not
connect.cursor.execute("SELECT EXISTS(SELECT 1 FROM App WHERE packageName = '%s')" % (packageName))
result = connect.cursor.fetchone()
return result[0] == 1
<file_sep>'''
Created on Mar 4, 2015
@author: Wschive
'''
import BaseHTTPServer, SimpleHTTPServer, ssl, requests
import json
from BaseHTTPServer import BaseHTTPRequestHandler
from urlparse import urlparse
from data.App import App
from database import db
from getApps import getApps
from risk import risk
import io
import os.path
def getIP():
print "getting ip"
r = requests.get("http://jsonip.com")
tempJson = r.json()
ip = tempJson["ip"]
return ip.encode("utf-8")
def run(server_class=BaseHTTPServer.HTTPServer):
ip = getIP()
port = 8888
handler = MyHandler
server_address = (ip, port)
httpd = server_class(server_address, handler)
print "starting server at", str(ip)+":"+str(port)
httpd.serve_forever()
class MyHandler(BaseHTTPRequestHandler):
def do_GET(self):
self.send_response(200, "ok")
self.send_header('Access-Control-Allow-Origin', '*')
self.send_header('Content-type', 'text/html')
self.end_headers()
print self.path
print "hi"
hi = urlparse(self.path)
if(hi.path == "/replace"):
print "replace app called"
rawResult = replaceRequest(hi.query)#list of appnames
self.wfile.write(createReplaceResponse(rawResult))
def do_POST(self):
self.send_error(502, "not implemented")
def createReplaceResponse(raw):
list = []
for app in raw:
unicodeString = db.getJSonApp(app.packageName)
list.append(unicodeString)
return json.dumps(list)
def replaceRequest(packageName):
app = getOrDL(packageName)
resultList = []
if(app.similar):
for similar in app.similar:
temp = getOrDL(similar)
if(temp is not None):
resultList.append(temp)
# app.similar = sorted(resultList, key=getValue)#sorted list of similar apps
app.similar = resultList
else:
print "similar apps list does not exist"
return ["no similar apps"]
return app.similar#list of suggested apps
def getOrDL(packageName):
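# Look the app up in the local database first; if it is missing, request its details from Google Play and cache them.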
# app =db.getApp(name)
print "getOrDL", packageName
try:
app = db.getApp(packageName)#add if published is old, redownload
except:
app = requestApp(packageName)
# app = downloadApp(packageName)
if(app is not None):
try:
db.saveApp(app)
except:
print "app exists in db", app.packageName
return app
def getValue(app):
fuzzy = app.analysis["FuzzyRisk"]
return fuzzy["VALUE"]
def requestApp(packageName):
permissionDict = getApps.getPermissions(packageName)#Dict (name: realName, permissions: permissions)
appname = permissionDict["name"]
logo = permissionDict["logo"]
similar = getApps.getOnlySimilar(packageName)
permissions = permissionDict["permissions"]
app = App(appname, packageName, logo, similar, permissions)
return app
def downloadApp(packageName):
# print "downloadApp", packageName
# appInfo = getAppInfo(packageName)
# if(appInfo["price"] == "free"):
# try:
# permissions = risk.permissions(packageName)
# # analysis = risk.run(name)
# except:
# getApps.run(packageName)
# permissions = risk.permissions(packageName)
# # analysis = risk.run(name)
# s = appInfo["description"]
# infoline = s.split(".",1)[0]#not used
# similar = appInfo["recommendedApps"]#is a list
# logo = appInfo["logo"]
# appname = appInfo["appName"]
# app = App(appname, packageName, logo, infoline, similar, permissions)
# return app
# print "app " + packageName + " is not free, will not be downloaded"
return None
# returns json with all the info
def getAppInfo(packageName):
#
# if(os.path.isfile("data/"+packageName+".json") ):
# file= open("data/"+packageName+".json")
# data = json.load(file)
# return data
#
# # works, but limited amount of calls
# print "getting Appinfo for ", packageName
# apiKey = {"key" :"9494f057c1a1a67ab30e5e7afdc6afe2"}
# r = requests.get("http://api.playstoreapi.com/v1.1/apps/"+packageName, params = apiKey)
# data = json.loads(r.content)
# with open("data/" + packageName + ".json", 'w') as f:
# f.write(json.dumps(data))
# return data
# print data["recommendedApps"]
return None
<file_sep>from risk import risk
from recommend import recommender
from server import server
from database import db
import socket
import sys
from thread import *
import traceback
import json
import select
import BaseHTTPServer, SimpleHTTPServer, ssl
from data import App
def main() :
# name = "com.hdezninirola.frequency"
name = "no.nrk.yr"
# name = "com.rovio.angrybirds"
# dir = "/Users/Wschive/Desktop/"
# name = "com.kabam.underworldandroid"
# getApps.run("com.bitdefender.clueful")
# risk.run("com.bitdefender.clueful")
# recommender.recommend()
# s = requests.Session()
# # s.auth=('<EMAIL>', '1.9.Alpha')
# s.post("https://accounts.google.com/ServiceLogin", {"Email":'<EMAIL>', "Passwd":'<PASSWORD>'})
# print s
# value = {"id":"com.squareenix.smoothieswipe"}
# # r = s.get("https://play.google.com/store/apps/details", params = value)
# # print r
#
# g = s.get("http://play.google.com/store/apps/similar", params = value)
# print g
# print(g.url)
# print g.content
# works, but limited amount of calls
# apiKey = {"key" :"9494f057c1a1a67ab30e5e7afdc6afe2"}
# r = requests.get("http://api.playstoreapi.com/v1.1/apps/"+name, params = apiKey)
# data = json.loads(r.content)
# print data
# print data["recommendedApps"]
db.connect()
server.run()
print "it ran!"
if __name__ == "__main__" :
main()<file_sep># Recommender
In the database folder:
- use the SQL in dbInit to create a database with some entries
- open db.py and enter the connection information for the database

In the getApps folder:
- open config.py
- enter the email and password of a valid Google account
- enter the androidID of a device that the above Google account is connected to (a sketch of config.py follows below)
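
config.py itself is not included in this dump; based on the attributes that getApps/download.py reads (config.ANDROID_ID, config.GOOGLE_LOGIN, config.GOOGLE_PASSWORD, config.AUTH_TOKEN), a minimal, assumed sketch would look like this (all values are placeholders):

    # config.py -- assumed layout, not part of the original repository
    ANDROID_ID = "<ANDROID_ID>"      # Android/GSF id of a device linked to the account
    GOOGLE_LOGIN = "<EMAIL>"         # Google account email
    GOOGLE_PASSWORD = "<PASSWORD>"   # Google account password
    AUTH_TOKEN = None                # optional pre-fetched auth token; None means log in with the credentials above
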
Install the Python dependencies:
- requests
- beautifulsoup4
- mysqldb - http://mysql-python.blogspot.no/2012/11/is-mysqldb-hard-to-install.html

pip install is a good way of installing the first two - https://pip.pypa.io/en/latest/installing.html<file_sep>
from androguard.core.bytecodes import apk
from androguard.core import androconf
from androguard.core.analysis import risk
from androguard.core.bytecodes.apk import APK
def display_result(res) :
for i in res :
print "\t", i
for j in res[i] :
print "\t\t", j, res[i][j]
def analyze_app(filename, ri, a) :
print filename
return ri.with_apk(a)
def run(name,dir = ""):
ri = risk.RiskIndicator()
ri.add_risk_analysis( risk.RedFlags() )
ri.add_risk_analysis( risk.FuzzyRisk() )
ret_type = androconf.is_android(dir + name + ".apk")
if ret_type == "APK" :
a = apk.APK(dir + name + ".apk")
return analyze_app( name, ri, a )
else:
print "ret_type was not APK"
def permissions(name, dir=""):
a = APK(dir+name+".apk")
#TODO: append list with score from watchdog
return a.permissions
<file_sep># from scikits.crab import datasets
# from scikits.crab.models import MatrixPreferenceDataModel
# from scikits.crab.metrics import pearson_correlation
# from scikits.crab.similarities import UserSimilarity
# from scikits.crab.recommenders.knn import UserBasedRecommender
# from scikits.crab.recommenders.knn import ItemBasedRecommender
# from scikits.crab.similarities.basic_similarities import ItemSimilarity
# import json
# import pandas as panda
# from pandas.io.json import json_normalize
# from sklearn.metrics.pairwise import euclidean_distances
# from scikits.crab.recommenders.knn.item_strategies import ItemsNeighborhoodStrategy
#
#
#
# global data
#
# def recommend():
#
# movies = datasets.load_sample_movies()
# #Build the model
# model = MatrixPreferenceDataModel(movies.data)
# #Build the similarity
# similarity = UserSimilarity(model, pearson_correlation)
# #Build the User based recommender
# recommender = UserBasedRecommender(model, similarity, with_preference=True)
# #Recommend items for the user 5 (Toby)
# recommender.recommend(5)
#
# items_strategy = ItemsNeighborhoodStrategy()
# similarity = ItemSimilarity(model, euclidean_distances)
# recsys = ItemBasedRecommender(model, similarity, items_strategy)
# # recsys.recommend('Leopoldo Pires')
#
# # >>> recsys.recommend('Leopoldo Pires')
# # ['Just My Luck', 'You, Me and Dupree']
# # >>> #Return the 2 explanations for the given recommendation.
# # >>> recsys.recommended_because('Leopoldo Pires', 'Just My Luck',2)
# # ['The Night Listener', 'Superman Returns']
#
# try:
# with open('data/data.json') as file:
# print "opening json"
# list = json.load(file)
# except:
# print "no data available"
#
# global data
# data = json_normalize(list)
# model = MatrixPreferenceDataModel(data)
#
# # beers = df.beer_name.unique()
# # apps = data.Name.unique()
# # simple_distances = []
# # for app1 in apps:
# # print "starting", app1
# # for app2 in apps:
# # if app1 != app2:
# # row = [app1, app2] + calculateBasicSimilarity(app1, app2)
# # simple_distances.append(row)
#
# print data.head(1)
# # ALL_FEATURES = ['RedFlags', 'FuzzyValue']
#
# def getRedFlags(app):
# x = 1
#
# def getFuzzyValue(app):
# # mask = data.Name.isin(str(app))
# value = data.sort('FuzzyValue')
#
# # def get_beer_reviews(beer, common_users):
# # mask = (data.review_profilename.isin(common_users)) & (data.beer_name==beer)
# # reviews = data[mask].sort('review_profilename')
# # reviews = reviews[reviews.review_profilename.duplicated()==False]
# # return reviews
# # beer_1_reviews = get_beer_reviews(beer_1, common_reviewers)
# # beer_2_reviews = get_beer_reviews(beer_2, common_reviewers)
# #
# # cols = ['beer_name', 'review_profilename', 'review_overall', 'review_aroma', 'review_palate', 'review_taste']
# # beer_2_reviews[cols].head()
# #
# def calculateBasicSimilarity(app1, app2):
# # find common reviewers
# # beer_1_reviewers = data[data.beer_name==beer1].review_profilename.unique()
# # beer_2_reviewers = data[data.beer_name==beer2].review_profilename.unique()
# # common_reviewers = set(beer_1_reviewers).intersection(beer_2_reviewers)
# app1Value = getFuzzyValue(app1)
# app2Value = getFuzzyValue(app2)
# return euclidean_distances(app1Value, app2Value)
<file_sep>#!/usr/bin/python
# Do not remove
import sys
from pprint import pprint
from googleplay import GooglePlayAPI
from helpers import sizeof_fmt
import config
from bs4 import BeautifulSoup
import requests
def run(packagename):
if (len(sys.argv) == 3):
filename = sys.argv[2]
else:
filename = packagename + ".apk"
# Connect
api = GooglePlayAPI(config.ANDROID_ID)
api.login(config.GOOGLE_LOGIN, config.GOOGLE_PASSWORD, config.AUTH_TOKEN)
# Get the version code and the offer type from the app details
m = api.details(packagename)
doc = m.docV2
vc = doc.details.appDetails.versionCode
ot = doc.offer[0].offerType
# Download
print "Downloading %s..." % sizeof_fmt(doc.details.appDetails.installationSize),
data = api.download(packagename, vc, ot)
open(filename, "wb").write(data)
print "Done"
def getSimilarWithLogo(packageName):
print "getting apps similar to ", packageName
s = requests.Session()
s.post("https://accounts.google.com/ServiceLogin", {"Email":config.GOOGLE_LOGIN, "Passwd":config.GOOGLE_PASSWORD})
value = {"id":packageName}
# r = s.get("https://play.google.com/store/apps/details", params = value)
# print r
g = s.get("http://play.google.com/store/apps/similar", params = value)
soup = BeautifulSoup( g.content)
apps = soup.find_all("a", class_ = "title")
logos = soup.find_all("img", class_ = "cover-image")
result = {}
for link, logo in zip(apps, logos):
line = link.get("href")
image = logo.get("data-cover-small")
result[(line.split("=",1)[1])] = image
return result
def getOnlySimilar(packageName):
print "getting apps similar to ", packageName
s = requests.Session()
s.post("https://accounts.google.com/ServiceLogin", {"Email":config.GOOGLE_LOGIN, "Passwd":config.GOOGLE_PASSWORD})
value = {"id":packageName}
# r = s.get("https://play.google.com/store/apps/details", params = value)
# print r
g = s.get("http://play.google.com/store/apps/similar", params = value)
soup = BeautifulSoup( g.content)
apps = soup.find_all("a", class_ = "title")
result = []
for link in apps:
line = link.get("href")
result.append(line.split("=",1)[1])
return result
def getPermissions(packageName, multiple = False):
api = GooglePlayAPI(config.ANDROID_ID)
api.login(config.GOOGLE_LOGIN, config.GOOGLE_PASSWORD, config.AUTH_TOKEN)
result = {}
# Only one app
if (not multiple):
response = api.details(packageName)
permissions = []
values = response.docV2.details.appDetails.permission._values
permissions = [x.encode('utf-8') for x in values]
name = response.docV2.title
result["name"] = name.encode("utf8")
result["permissions"] = permissions
logo = response.docV2.image._values[0]
image = logo.imageUrl.encode("utf8")
result["logo"] = image
return result
else: # More than one app, not tested or used
response = api.bulkDetails(packageName)
for entry in response.entry:
if (not not entry.ListFields()): # if the entry is not empty
result[entry.doc.docid] = entry.doc.details.appDetails.permission
return result
<file_sep>'''
Created on Mar 18, 2015
@author: Wschive
'''
class App:
def __init__(self, name, packageName, logo, similar, permissions):
self.name = name
self.logo = logo
self.similar = similar
self.permissions = permissions
self.packageName = packageName
| 586a300cace0cb97b2c963a27a8c17f299d61007 | [
"Markdown",
"Python"
] | 8 | Python | SHood55/Rec | 265f48a48228e1f0a43b1ece48833e5395263b0b | ff4cf598cffa44b0b96fdb0b9b60755eb7a9b716 | |
refs/heads/master | <repo_name>SteveUXC/XUTransition<file_sep>/README.md
# XUTransition
A collection of transition animations; the blog posts used as references are linked at the bottom of the article.
<file_sep>/XUTrasition/XUTrasition/MainTableViewController.swift
//
// MainTableViewController.swift
// XUTrasition
//
// Created by CNCOMMDATA on 2017/5/19.
// Copyright © 2017年 轩慧闯. All rights reserved.
//
import UIKit
class MainTableViewController: UITableViewController {
let titles = ["弹性pop"]
override func viewDidLoad() {
super.viewDidLoad()
}
}
// MARK: - Table view data source
extension MainTableViewController {
override func numberOfSections(in tableView: UITableView) -> Int {
return 1
}
override func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
return titles.count
}
}
// MARK: - Table view delegate
extension MainTableViewController {
override func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) {
switch indexPath.row {
case 0:
let spring = XUStringVC1ViewController()
navigationController?.pushViewController(spring, animated: true)
default:
return
}
}
}
| b75180985fe3e8d2153c4549992aa6134fba963c | [
"Markdown",
"Swift"
] | 2 | Markdown | SteveUXC/XUTransition | 54120a7456480e01f9dd910836226ef974e57443 | b1b3be106fc91b9c1180d13dc8713b041b329803 | |
refs/heads/master | <file_sep># ScreenShotView
An Android widget for screen capture: you can doodle on the captured image, draw circles on it, and save the modified image locally.
Doodle strokes can be undone.
1. Add the dependency to your project:
Add the following to project/build.gradle:
allprojects {
repositories {
...
maven { url 'https://jitpack.io' }
}
}
Add the following to project/app/build.gradle:
dependencies {
compile 'com.github.luweibin3118:ScreenShotView:v1.0.1'
}
2. Add it to the view you want to capture. When creating the ScreenShotView you only need to pass in the Activity to be captured; once the screenshot feature is entered, the current Activity is captured.
ScreenShotView screenShotView = new ScreenShotView(MainActivity.this);
or
<com.lwb.screenshot.ScreenShotView
android:layout_width="match_parent"
android:layout_height="match_parent" />
3. Several methods are provided for customizing the UI (a usage sketch follows the method list):
/**
* Set the array of selectable color values, e.g. 0xffff0000;
*
* @param colors
*/
public void setColors(int[] colors) {
this.colors = colors;
}
/**
* Set the toolbar background color
*
* @param toolBarBackgroundColor
*/
public void setToolBarBackgroundColor(int toolBarBackgroundColor) {
this.toolBarBackgroundColor = toolBarBackgroundColor;
}
/**
* Set the toolbar text size
*
* @param toolBarTextSize
*/
public void setToolBarTextSize(int toolBarTextSize) {
this.toolBarTextSize = toolBarTextSize;
}
/**
* Set the toolbar text color
*
* @param toolBarTextColor
*/
public void setToolBarTextColor(int toolBarTextColor) {
this.toolBarTextColor = toolBarTextColor;
}
/**
* Set whether the screenshot is enlarged
*
* @param scaleShot
*/
public void setScaleShot(boolean scaleShot) {
this.scaleShot = scaleShot;
}
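
For example, a minimal usage sketch combining these setters with the creation code from step 2 (the specific color and size values below are only illustrative; they are not taken from the project):

ScreenShotView screenShotView = new ScreenShotView(MainActivity.this);
screenShotView.setColors(new int[]{0xffff0000, 0xff00ff00, 0xff0000ff}); // selectable doodle colors
screenShotView.setToolBarBackgroundColor(0xff333333);                    // toolbar background
screenShotView.setToolBarTextSize(14);                                   // toolbar text size
screenShotView.setToolBarTextColor(0xffffffff);                          // toolbar text color
screenShotView.setScaleShot(true);                                       // enlarge the captured screenshot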
4. The effect looks like this:
Double-tap the screen to take a screenshot:

Add a doodle or draw a circle on the screenshot:


<file_sep>package com.lwb.screenshot;
import android.app.Activity;
import android.os.Bundle;
import android.view.View;
public class MainActivity extends Activity {
ScreenShotView screenShotView;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
screenShotView = new ScreenShotView(MainActivity.this);
screenShotView.setColors(new int[]{0xffffff00, 0xffff0000, 0xff00ff00, 0xff0000ff});
screenShotView.setVisibility(View.GONE);
findViewById(R.id.test_screenshot).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
screenShotView.setVisibility(View.VISIBLE);
}
});
}
@Override
public void onBackPressed() {
if (screenShotView.getVisibility() == View.VISIBLE) {
screenShotView.setVisibility(View.GONE);
return;
}
super.onBackPressed();
}
}
| c2f439c9413726d0d2c2d850c1b067a9e5f8e0e3 | [
"Markdown",
"Java"
] | 2 | Markdown | dcn01/ScreenShotView | df60ccea4a0deeeb8aacae3f44c3ef7bafa45855 | 7dca053a1b813a9cbd2745e4e4023504f84c983d | |
refs/heads/master | <repo_name>CMorooney/SpyTag2<file_sep>/SpyTag2.0/MainActivity.cs
using System;
using Android.App;
using Android.Support.V4.App;
using Android.Content.PM;
using Android.Widget;
using Android.OS;
using Color = Android.Graphics.Color;
using Int = Java.Lang.Integer;
namespace SpyTag2
{
[Activity (Label = "SpyTag", MainLauncher = true, Icon = "@drawable/app_icon", LaunchMode = LaunchMode.SingleTask)]
public class MainActivity : Activity
{
bool HasBeenPrompted;
AlertDialog PlayPrompt;
#region layout variables
TextView Text;
TextView CountdownText;
#endregion
protected override void OnCreate (Bundle bundle){
base.OnCreate (bundle);
HasBeenPrompted = false;
SetContentView (Resource.Layout.Main);
Text = FindViewById<TextView>(Resource.Id.text);
CountdownText = FindViewById<TextView> (Resource.Id.countdown);
//create alert dialog to prompt user of "challenge" and respond appropriately
var promptBuilder = new AlertDialog.Builder (this)
.SetPositiveButton ("Yes", (s, args) => {
//keep play state updated!
((ApplicationState)Application).PlayState = PlayState.AcceptedChallenge;
Console.WriteLine ("************ CHALLENGE ACCEPTED *************");
((ApplicationState)Application).Timer.Stop();
//set seconds to countdown before player loses
((ApplicationState)Application).SecondsToCountDown = 20;
((ApplicationState)Application).Timer.Start();
})
.SetNegativeButton ("No", (s, args) => {
//keep play state updated!
((ApplicationState)Application).PlayState = PlayState.Coward;
((ApplicationState)Application).Timer.Stop();
RunOnUiThread(() => Text.Text = "Coward.");
});
PlayPrompt = promptBuilder.Create ();
PlayPrompt.SetMessage ("Will you attempt to disarm? 5 Seconds to comply...");
PlayPrompt.SetTitle ("Tag nearby!");
}
void HandleTimerTick (){
//only tick if activity is running
if (!IsFinishing) {
if (((ApplicationState)Application).SecondsToCountDown == 0) {
((ApplicationState)Application).Timer.Stop ();
switch (((ApplicationState)Application).PlayState) {
case PlayState.InRange:
PlayPrompt.Dismiss ();
((ApplicationState)Application).PlayState = PlayState.Coward;
RunOnUiThread (() => Text.Text = "Coward.");
break;
case PlayState.AcceptedChallenge:
RunOnUiThread (() => {
Text.Text = "You've been tagged";
CountdownText.Text = String.Empty;
});
break;
}
} else {
((ApplicationState)Application).SecondsToCountDown--;
switch (((ApplicationState)Application).PlayState) {
case PlayState.InRange:
RunOnUiThread (() => PlayPrompt.SetMessage ("Will you attempt to disarm? " + ((ApplicationState)Application).SecondsToCountDown + " Seconds to comply..."));
break;
case PlayState.AcceptedChallenge:
RunOnUiThread (() => CountdownText.Text = "You have " + ((ApplicationState)Application).SecondsToCountDown + " seconds to find the tag");
break;
}
}
}
}
void HandleRadiusExit (){
((ApplicationState)Application).PlayState = PlayState.Idle;
RunOnUiThread(() => Text.Text = "Bye bye, tag");
}
void HandleRadiusEnter (){
((ApplicationState)Application).PlayState = PlayState.InRange;
RunOnUiThread (() => {
Text.Text = "Yo there's something nearby";
});
}
void HandleProximityImmediate (){
RunOnUiThread (() => {
if(((ApplicationState)Application).PlayState == PlayState.AcceptedChallenge){
Text.Text = "Tag Disarmed!";
((ApplicationState)Application).PlayState = PlayState.CompletedChallenge;
((ApplicationState)Application).Timer.Stop();
}
});
}
void HandleProximityFar (){
RunOnUiThread (() => Text.Text = "Tag Detected: far");
}
void HandleProximityClose (){
RunOnUiThread (() => {
if (!HasBeenPrompted && !((ApplicationState)Application).IsInBackground && !IsFinishing) {
HasBeenPrompted = true;
//this should start at 5 for initial prompt to play
((ApplicationState)Application).SecondsToCountDown = 5;
//start countdown timer to accept
((ApplicationState)Application).Timer.Start();
//show prompt to play
PlayPrompt.Show ();
Text.Text = "Tag Detected: close";
}
});
}
protected override void OnPause (){
base.OnPause ();
((ApplicationState)Application).IsInBackground = true;
((ApplicationState)Application).BeaconManager.SetBackgroundMode (((ApplicationState)Application), true);
}
protected override void OnResume ()
{
base.OnResume ();
((ApplicationState)Application).IsInBackground = false;
((ApplicationState)Application).BeaconManager.SetBackgroundMode (((ApplicationState)Application), false);
//subscribe to Application Events. These need to be in OnResume so they don't get called before Activity is running
((ApplicationState)Application).ProximityClose += HandleProximityClose;
((ApplicationState)Application).ProximityFar += HandleProximityFar;
((ApplicationState)Application).ProximityImmediate += HandleProximityImmediate;
((ApplicationState)Application).RadiusExit += HandleRadiusExit;
((ApplicationState)Application).RadiusEnter += HandleRadiusEnter;
((ApplicationState)Application).TimerTick += HandleTimerTick;
}
}
}
<file_sep>/SpyTag2.0/Enums/PlayState.cs
using System;
namespace SpyTag2
{
public enum PlayState
{
Idle,
InRange,
AcceptedChallenge,
CompletedChallenge,
Coward
}
}
<file_sep>/SpyTag2.0/ApplicationState.cs
using System;
using Android.App;
using Android.Content;
using System.Linq;
using System.Timers;
using Android.Runtime;
using RadiusNetworks.IBeaconAndroid;
using Color = Android.Graphics.Color;
using Int = Java.Lang.Integer;
namespace SpyTag2
{
public delegate void ProximityClose();
public delegate void ProximityFar();
public delegate void ProximityImmediate();
public delegate void RadiusEnter();
public delegate void RadiusExit();
public delegate void TimerTick();
[Application]
public class ApplicationState : Application, IBeaconConsumer
{
public event ProximityClose ProximityClose;
public event ProximityFar ProximityFar;
public event ProximityImmediate ProximityImmediate;
public event RadiusExit RadiusExit;
public event RadiusEnter RadiusEnter;
public event TimerTick TimerTick;
const string UUID = "01122334-4556-6778-899a-abbccddeeff0";
const string TagId = "Bomb";
public PlayState PlayState;
public IBeaconManager BeaconManager;
public int SecondsToCountDown;
public Timer Timer;
public bool IsInBackground;
public bool HasBeenNotified;
MonitorNotifier MonitoringNotifier;
RangeNotifier RangeNotifier;
Region MonitoringRegion, RangingRegion;
public ApplicationState (IntPtr handle, JniHandleOwnership ownerShip) : base(handle, ownerShip){}
public override void OnCreate (){
base.OnCreate ();
HasBeenNotified = false;
BeaconManager = IBeaconManager.GetInstanceForApplication (this);
MonitoringNotifier = new MonitorNotifier ();
RangeNotifier = new RangeNotifier ();
//set to idle on load
PlayState = PlayState.Idle;
Int maj = (Int)1;
Int min = (Int)104;
MonitoringRegion = new Region (TagId, UUID, maj, min);
RangingRegion = new Region (TagId, UUID, maj, min);
MonitoringNotifier.EnterRegionComplete += EnteredRadius;
MonitoringNotifier.ExitRegionComplete += ExitedRadius;
//bind beacon manager to application so it doesn't die in backgrounding
//set background scan intervals
BeaconManager.Bind (this);
BeaconManager.SetBackgroundScanPeriod (1500);
BeaconManager.SetBackgroundBetweenScanPeriod (1500);
RangeNotifier.DidRangeBeaconsInRegionComplete += DistanceToRegionChanged;
//enabled timer that elapses every 1.5 second
Timer = new Timer {
Interval = 1500
};
Timer.Elapsed += TimerElapsed;
}
void TimerElapsed (object sender, ElapsedEventArgs e){
TimerTick ();
}
void DistanceToRegionChanged (object sender, RangeEventArgs e){
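// Only the nearest ranged beacon is considered, and updates are ignored once the player has refused the challenge or already disarmed the tag.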
if (e.Beacons.Count > 0 && PlayState != PlayState.Coward && PlayState != PlayState.CompletedChallenge) {
var beacon = e.Beacons.FirstOrDefault ();
switch ((ProximityType)beacon.Proximity) {
case ProximityType.Immediate:
ProximityImmediate ();
break;
case ProximityType.Near:
if (IsInBackground && !HasBeenNotified) {
HasBeenNotified = true;
var intent = new Intent (this, typeof(MainActivity));
TaskStackBuilder stackBuilder = TaskStackBuilder.Create(this);
stackBuilder.AddNextIntent(intent);
PendingIntent pendingIntent = stackBuilder.GetPendingIntent(0, PendingIntentFlags.UpdateCurrent);
// Instantiate the builder and set notification elements:
var builder = new Notification.Builder (this)
.SetContentTitle ("Tag Nearby!")
.SetContentText ("Tap To Accept")
.SetContentIntent (pendingIntent)
.SetSmallIcon (Resource.Drawable.notification_icon)
.SetDefaults(NotificationDefaults.All);
// Build the notification:
Notification notification = builder.Build ();
// Get the notification manager:
NotificationManager notificationManager =
GetSystemService (NotificationService) as NotificationManager;
// Publish the notification:
const int notificationId = 0;
notificationManager.Notify (notificationId, notification);
} else {
ProximityClose ();
}
break;
case ProximityType.Far:
ProximityFar ();
break;
case ProximityType.Unknown:
break;
}
}
}
void ExitedRadius (object sender, MonitorEventArgs e){
RadiusExit ();
}
void EnteredRadius (object sender, MonitorEventArgs e){
RadiusEnter ();
}
public void OnIBeaconServiceConnect(){
BeaconManager.SetMonitorNotifier(MonitoringNotifier);
BeaconManager.SetRangeNotifier(RangeNotifier);
BeaconManager.StartMonitoringBeaconsInRegion(MonitoringRegion);
BeaconManager.StartRangingBeaconsInRegion(RangingRegion);
}
}
}
| 801edc624fb8cd77365a5700449666100bf63c7b | [
"C#"
] | 3 | C# | CMorooney/SpyTag2 | fe4249711fda2ef6fb88b65d92d6e2d339c520d3 | d5a2ab828fc7af40d44d9f3018a43d16fdc27759 | |
refs/heads/main | <file_sep>let travelers = [
{
"id": 15,
"name": "<NAME>",
"travelerType": "relaxer"
},
{
"id": 26,
"name": "<NAME>",
"travelerType": "thrill-seeker"
},
{
"id": 13,
"name": "<NAME>",
"travelerType": "shopper"
},
{
"id": 43,
"name": "<NAME>",
"travelerType": "photographer"
},
{
"id": 95,
"name": "<NAME>",
"travelerType": "thrill-seeker"
},
{
"id": 63,
"name": "<NAME>",
"travelerType": "shopper"
},
{
"id": 27,
"name": "<NAME>",
"travelerType": "relaxer"
},
{
"id": 81,
"name": "<NAME>",
"travelerType": "history buff"
},
{
"id": 59,
"name": "<NAME>",
"travelerType": "relaxer"
},
{
"id": 100,
"name": "<NAME>",
"travelerType": "relaxer"
}
]
let trips = [
{
"id": 1,
"userID": 15,
"destinationID": 44,
"travelers": 1,
"date": "2021/04/20",
"duration": 4,
"status": "approved",
"suggestedActivities": ["swimming"],
"estimatedLodgingCostPerDay": 450,
"estimatedFlightCostPerPerson": 80
}, {
"id": 2,
"userID": 15,
"destinationID": 22,
"travelers": 4,
"date": "2020/10/04",
"duration": 18,
"status": "pending",
"suggestedActivities": ["swim with dolphins"],
"estimatedLodgingCostPerDay": 90,
"estimatedFlightCostPerPerson": 650
}, {
"id": 3,
"userID": 15,
"destinationID": 12,
"travelers": 3,
"date": "2020/05/22",
"duration": 17,
"status": "pending",
"suggestedActivities": ["swimming"],
"estimatedLodgingCostPerDay": 150,
"estimatedFlightCostPerPerson": 1200
}, {
"id": 43,
"userID": 15,
"destinationID": 14,
"travelers": 2,
"date": "2022/02/25",
"duration": 10,
"status": "approved",
"suggestedActivities": ["zip-lining"],
"estimatedLodgingCostPerDay": 70,
"estimatedFlightCostPerPerson": 830
}, {
"id": 15,
"userID": 95,
"destinationID": 29,
"travelers": 3,
"date": "2020/04/30",
"duration": 18,
"status": "approved",
"suggestedActivities": ["hiking"],
"estimatedLodgingCostPerDay": 80,
"estimatedFlightCostPerPerson": 1100
}, {
"id": 23,
"userID": 27,
"destinationID": 35,
"travelers": 3,
"date": "2020/06/29",
"duration": 9,
"status": "approved",
"suggestedActivities": ["swimming"],
"estimatedLodgingCostPerDay": 80,
"estimatedFlightCostPerPerson": 1100
}, {
"id": 7,
"userID": 27,
"destinationID": 17,
"travelers": 5,
"date": "2020/5/28",
"duration": 20,
"status": "approved",
"suggestedActivities": ["hiking"],
"estimatedLodgingCostPerDay": 30,
"estimatedFlightCostPerPerson": 1200
}, {
"id": 4,
"userID": 81,
"destinationID": 39,
"travelers": 6,
"date": "2021/02/07",
"duration": 4,
"status": "approved",
"suggestedActivities": ["bike-riding"],
"estimatedLodgingCostPerDay": 995,
"estimatedFlightCostPerPerson": 90
}, {
"id": 80,
"userID": 2,
"destinationID": 19,
"travelers": 5,
"date": "2019/12/19",
"duration": 19,
"status": null,
"suggestedActivities": null,
"estimatedLodgingCostPerDay": 60,
"estimatedFlightCostPerPerson": 500
}, {
"id": 70,
"userID": 100,
"destinationID": 50,
"travelers": 6,
"date": "2017/07/23",
"duration": 17,
"status": "approved",
"suggestedActivities": ["reading"],
"estimatedLodgingCostPerDay": 1400,
"estimatedFlightCostPerPerson": 75
}
]
export { travelers, trips }
<file_sep>const dayjs = require('dayjs')
dayjs().format()
const isBetween = require('dayjs/plugin/isBetween');
dayjs.extend(isBetween);
class Traveler {
constructor(travelerInfo) {
this.id = travelerInfo.id;
this.name = travelerInfo.name;
this.travelerType = travelerInfo.travelerType;
this.myTrips;
this.myCurrentTrip;
this.myPastTrips = [];
this.myFutureTrips = [];
this.myPendingTrips = [];
this.myTripsInLastYear = [];
}
sortMyTrips(myTrips) {
this.myTrips = myTrips;
this.findPendingTrips();
const todaysDate = dayjs().format('YYYY/MM/DD');
this.sortByType(todaysDate, myTrips);
}
findPendingTrips() {
const pendingTrips = this.myTrips.filter(trip => trip.status === 'pending');
this.myPendingTrips = pendingTrips;
}
sortByType(date, trips) {
trips.forEach(trip => {
let endDate = dayjs(trip.date).add(trip.duration, 'day').format('YYYY/MM/DD');
if (trip.status !== 'pending') {
if (dayjs(date).isBetween(trip.date, endDate, null, [])) {
this.myCurrentTrip = trip;
} else if (dayjs(date).isBefore(trip.date)) {
this.myFutureTrips.push(trip);
} else if (dayjs(endDate).isBefore(date)) {
this.myPastTrips.push(trip);
}
}
})
}
findTripsInLastYear(todaysDate) {
const oneYearAgo = dayjs(todaysDate).subtract(1, 'year').format('YYYY/MM/DD');
this.myTrips.forEach(trip => {
if (dayjs(trip.date).isBetween(oneYearAgo, todaysDate, null, [])) {
this.myTripsInLastYear.push(trip);
}
})
}
calculateSpentOnTripsThisYear(todaysDate) {
this.findTripsInLastYear(todaysDate);
if (!this.myTripsInLastYear.length) {
return "You haven't traveled with us recently! We'd love to help you book your next trip!";
} else {
const cost = this.myTripsInLastYear.reduce((sum, trip) => {
sum += (trip.duration * trip.estimatedLodgingCostPerDay)
+ (trip.travelers * trip.estimatedFlightCostPerPerson);
return sum;
}, 0)
const totalCost = (cost * 0.1) + cost;
const finalCost = Number.parseFloat(totalCost).toFixed(2);
return `You spent $${finalCost} on trips in the last year!`;
}
}
addTrip(property, trip) {
this[property].push(trip);
}
}
export default Traveler;
<file_sep>const dayjs = require('dayjs')
dayjs().format()
const isBetween = require('dayjs/plugin/isBetween');
dayjs.extend(isBetween);
const domUpdates = {
displayUserNameErrorMessage() {
document.getElementById('userNameErrorMessage').innerText = "Please try your username again!";
},
clearUserNameErrorMessage() {
document.getElementById('userNameErrorMessage').innerText = "";
},
displayPasswordErrorMessage() {
document.getElementById('passwordErrorMessage').innerText = "Please try your password again!";
},
greetUser(traveler) {
let firstName = traveler.name.split(' ')[0];
document.getElementById('userGreeting').innerHTML = `Welcome back, ${firstName}!`;
},
hideLogInForm() {
document.querySelector('.user-sign-in').classList.add('hidden');
document.getElementById('userDashboard').classList.remove('hidden');
document.getElementById('userGreeting').classList.remove('hidden');
},
displayTrips(traveler) {
const myTripsDisplay = document.getElementById('tripsDisplayArea');
const userPastTrips = document.getElementById('userPastTrips');
const userPresentTrip = document.getElementById('userPresentTrip');
const userFutureTrips = document.getElementById('userUpcomingTrips');
const userPendingTrips = document.getElementById('userPendingTrips');
if (!traveler.myTrips) {
myTripsDisplay.innerText = "Book your first trip with us!";
}
if (traveler.myPastTrips.length === 0) {
userPastTrips.innerText = "😯 You don't have any past trips! 😯";
} else if (traveler.myPastTrips) {
domUpdates.renderUserTripCards(traveler.myPastTrips, userPastTrips);
}
if (!traveler.myCurrentTrip) {
userPresentTrip.innerText = "🏠 You're stuck at home for now! 🏠";
} else if (traveler.myCurrentTrip) {
domUpdates.renderUserTripCards(traveler.myCurrentTrip, userPresentTrip);
}
if (traveler.myFutureTrips.length === 0) {
userFutureTrips.innerText = "😩 You don't have any upcoming trips! 😩";
} else if (traveler.myFutureTrips) {
domUpdates.renderUserTripCards(traveler.myFutureTrips, userFutureTrips);
}
if (traveler.myPendingTrips.length === 0) {
userPendingTrips.innerText = "😭 You don't have any pending trips! 😭";
} else {
domUpdates.renderUserTripCards(traveler.myPendingTrips, userPendingTrips);
}
},
renderUserTripCards(filteredTrips, pageArea) {
pageArea.innerHTML = '';
let tripCardsToDisplay = '';
filteredTrips.forEach(trip => {
let endDate = dayjs(trip.date).add(trip.duration, 'day').format('YYYY/MM/DD');
tripCardsToDisplay +=
`<article class="card">
<div class="upper-card">
<img class="location-pic" src=${trip.image} alt=${trip.alt}>
</div>
<div class="lower-card">
<p class="location">${trip.destination}</p>
<p class="dates">${trip.date} - ${endDate}</p>
</div>
</article>`
})
pageArea.innerHTML = tripCardsToDisplay;
},
displayTotalSpent(traveler) {
const todaysDate = dayjs().format('YYYY/MM/DD');
const totalAmount = traveler.calculateSpentOnTripsThisYear(todaysDate);
document.getElementById('moneySpent').innerText = totalAmount;
},
displayDestinationCards(destinationData) {
let destinationCardsToDisplay = '';
destinationData.forEach(trip => {
destinationCardsToDisplay +=
`<article class="card">
<div class="upper-card">
<img class="location-pic" src=${trip.image} alt=${trip.alt}>
</div>
<div class="lower-card">
<p class="location">${trip.destination}</p>
</div>
</article>`
});
document.getElementById('destinationsDisplay').innerHTML = destinationCardsToDisplay;
},
displayDestinationDropdownOptions(destinationData) {
let destinationDropdownOptions = '';
destinationData.forEach(trip => {
destinationDropdownOptions +=
`<option value="${trip.destination}">`
});
document.getElementById('destinations').innerHTML = destinationDropdownOptions;
},
displayTripEstimate(estimate) {
document.getElementById('tripEstimate').innerText = `This trip will cost $${estimate}, including agent's fee. Book now!`;
},
displayDateErrorMessage(date) {
document.getElementById('tripEstimate').innerText = `Please choose a departure date after ${date}`;
},
displayNumberErrorMessage(category) {
if (category === 'durationInput') {
document.getElementById('tripEstimate').innerText = "Please enter a valid number for duration of trip.";
}
if (category === 'travelersInput') {
document.getElementById('tripEstimate').innerText = "Please enter a valid number for number of travelers.";
}
},
displayDestinationErrorMessage() {
document.getElementById('tripEstimate').innerText = "Please choose a valid destination."
},
clearErrorMessage() {
document.getElementById('tripEstimate').innerText = '';
},
displayTripEstimateErrorMessage() {
document.getElementById('tripEstimate').innerText = "Sorry, something went wrong! Please check your request inputs again!";
},
enableRequestButton() {
if (document.querySelector('.submit-request').classList.contains('disable')) {
document.querySelector('.submit-request').classList.remove('disable');
}
},
toggleElement(element) {
if (document.querySelector(element).classList.contains('disable')) {
document.querySelector(element).classList.remove('disable');
} else {
document.querySelector(element).classList.add('disable');
}
},
displayRequestSubmittedMessage() {
document.getElementById('tripEstimate').innerText = 'Your trip has been submitted for approval by an agent!';
domUpdates.toggleElement('.submit-request');
domUpdates.toggleElement('.request-trip-form');
},
clearForm() {
document.getElementById('tripEstimate').innerText = ''
document.querySelector('.request-trip-form').reset();
domUpdates.toggleElement('.request-trip-form');
},
displayCallUsErrorMessage() {
document.getElementById('tripEstimate').innerText = 'We\'re sorry, something went wrong with your request. Please try again later or call one of our travel agents at 1-800-555-5555!';
},
displayServerIsDownMessage() {
document.getElementById('userGreeting').innerText = 'We\'re sorry, something went wrong! Please try again later or call one of our travel agents at 1-800-555-5555!';
document.getElementById('userDashboard').classList.add('hidden')
}
}
export default domUpdates;
<file_sep>import Traveler from './traveler.js';
import Trip from './trip.js';
import domUpdates from './domUpdates.js';
import { fetchAllData, addNewTrip } from './networkRequests';
const dayjs = require('dayjs');
dayjs().format();
const isBetween = require('dayjs/plugin/isBetween');
dayjs.extend(isBetween);
//*******MEDIA QUERIES********//
const signInButton = document.getElementById('signIn');
const dateInput = document.getElementById('dateInput');
const tripDurationInput = document.getElementById('durationInput');
const numberOfTravelersInput = document.getElementById('travelersInput');
const destinationInput = document.getElementById('destinationInput');
const getEstimateButton = document.querySelector('.get-trip-estimate');
const submitRequestButton = document.querySelector('.submit-request');
let trips, destinations, singleTraveler, currentTraveler, pendingTrip;
//*******Event Listeners******//
signInButton.addEventListener('click', validateUserName);
destinationInput.addEventListener('keyup', filterDestinationsBySearch);
destinationInput.addEventListener('blur', checkDestinationInput);
dateInput.addEventListener('blur', checkDateInput);
tripDurationInput.addEventListener('blur', function() {
checkNumbersInput('durationInput')
});
numberOfTravelersInput.addEventListener('blur', function() {
checkNumbersInput('travelersInput')
});
getEstimateButton.addEventListener('click', validateFormInputs);
submitRequestButton.addEventListener('click', submitNewTripRequest);
//*******Functions******//
function validateUserName() {
const userNameInput = document.getElementById('userName').value;
const result = userNameInput.split('traveler');
let userID;
if (!result[0]) {
userID = result[1];
}
if (0 < userID && userID < 51) {
validatePassword(userID)
domUpdates.clearUserNameErrorMessage();
} else {
domUpdates.displayUserNameErrorMessage();
}
}
function validatePassword(userID) {
const passwordInput = document.getElementById('password').value;
if (passwordInput === '<PASSWORD>') {
onPageLoad(userID);
domUpdates.hideLogInForm();
} else {
domUpdates.displayPasswordErrorMessage();
}
}
function onPageLoad(userID) {
fetchAllData(userID)
.then(allData => {
trips = allData.tripsData
destinations = allData.destinationsData
singleTraveler = allData.singleTravelerData
combineDataSets(trips, destinations)
currentTraveler = new Traveler(singleTraveler)
filterTripsByTraveler(singleTraveler.id)
domUpdates.greetUser(currentTraveler)
domUpdates.displayTrips(currentTraveler)
domUpdates.displayTotalSpent(currentTraveler)
domUpdates.displayDestinationCards(destinations.destinations)
domUpdates.displayDestinationDropdownOptions(destinations.destinations)
});
}
function combineDataSets(tripData, destinationData) {
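// Merge each trip with its destination's details (costs, image, alt text, name) by matching destinationID to the destination id.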
const result = tripData.trips.map(trip => {
destinationData.destinations.forEach(destination => {
if (trip.destinationID === destination.id) {
trip['estimatedLodgingCostPerDay'] = destination.estimatedLodgingCostPerDay;
trip['estimatedFlightCostPerPerson'] = destination.estimatedFlightCostPerPerson;
trip['image'] = destination.image;
trip['alt'] = destination.alt;
trip['destination'] = destination.destination;
}
})
return trip;
})
trips = result;
}
function filterTripsByTraveler(travelerID) {
const myTrips = trips.filter(trip => trip.userID === travelerID);
currentTraveler.sortMyTrips(myTrips);
}
function filterDestinationsBySearch(e) {
let searchText = e.target.value.toLowerCase();
let filteredDestinations = [];
destinations.destinations.forEach(location => {
if (location.destination.toLowerCase().includes(searchText)) {
filteredDestinations.push(location)
}
domUpdates.displayDestinationCards(filteredDestinations);
})
}
function checkDateInput() {
const startDate = dateInput.value;
const todaysDate = dayjs().format('YYYY-MM-DD');
if (dayjs(startDate).isBefore(todaysDate)) {
domUpdates.displayDateErrorMessage(todaysDate);
} else {
domUpdates.clearErrorMessage();
return true;
}
}
function checkNumbersInput(inputType) {
const input = document.getElementById(inputType).value;
const result = input.split('').map(num => parseInt(num));
if (result.includes(NaN) || (!input)) {
domUpdates.displayNumberErrorMessage(inputType);
} else {
domUpdates.clearErrorMessage();
return true;
}
}
function checkDestinationInput() {
const city = document.getElementById('destinationInput').value;
const allCities = destinations.destinations.map(location => location.destination)
if (!allCities.includes(city)) {
domUpdates.displayDestinationErrorMessage();
} else {
domUpdates.clearErrorMessage();
return true;
}
}
function validateFormInputs() {
if (checkDateInput() && checkNumbersInput('durationInput') && checkNumbersInput('travelersInput') && checkDestinationInput()) {
calculateTripEstimate();
} else {
domUpdates.displayTripEstimateErrorMessage();
}
}
function calculateTripEstimate() {
const startDate = document.getElementById('dateInput').value;
const duration = document.getElementById('durationInput').value;
const numTravelers = document.getElementById('travelersInput').value;
const destination = document.getElementById('destinationInput').value;
let locationID, estimatedLodging, estimatedFlight;
trips.forEach(trip => {
if (trip.destination === destination) {
locationID = trip.destinationID;
estimatedLodging = trip.estimatedLodgingCostPerDay;
estimatedFlight = trip.estimatedFlightCostPerPerson;
}
});
const tripData =
{
id: trips.length + 1,
userID: currentTraveler.id,
destinationID: locationID,
travelers: parseInt(numTravelers),
date: startDate,
duration: parseInt(duration),
estimatedLodgingCostPerDay: estimatedLodging,
estimatedFlightCostPerPerson: estimatedFlight
};
pendingTrip = new Trip(tripData);
const pendingTripEstimate = pendingTrip.estimateTripCost();
if (!pendingTripEstimate) {
domUpdates.toggleElement('.request-trip-form');
domUpdates.displayCallUsErrorMessage();
setResetTimer();
domUpdates.displayDestinationCards(destinations.destinations);
} else {
domUpdates.displayTripEstimate(pendingTripEstimate);
domUpdates.enableRequestButton();
}
}
function submitNewTripRequest() {
const tripObject = ( {id: pendingTrip.id,
userID: pendingTrip.userID,
destinationID: pendingTrip.destinationID,
travelers: pendingTrip.travelers,
date: pendingTrip.date.split('-').join('/'),
duration: pendingTrip.duration,
status: 'pending',
suggestedActivities: []
} );
addNewTrip(tripObject)
.then(response => {
updatePendingTrip(pendingTrip)
currentTraveler.addTrip('myTrips', pendingTrip)
currentTraveler.addTrip('myPendingTrips', pendingTrip)
trips.push(pendingTrip)
domUpdates.displayTrips(currentTraveler)
domUpdates.displayRequestSubmittedMessage()
})
setResetTimer();
domUpdates.displayDestinationCards(destinations.destinations);
}
function updatePendingTrip(trip) {
destinations.destinations.forEach(destination => {
if (destination.id === trip.destinationID) {
trip['image'] = destination.image;
trip['alt'] = destination.alt;
trip['destination'] = destination.destination;
}
});
}
function setResetTimer() {
setTimeout(function() {domUpdates.clearForm() }, 7000);
}
<file_sep>class Trip {
constructor(tripData) {
this.id = tripData.id;
this.userID = tripData.userID;
this.destinationID = tripData.destinationID;
this.travelers = tripData.travelers;
this.date = tripData.date;
this.duration = tripData.duration;
this.status = tripData.status || 'pending';
this.suggestedActivities = tripData.suggestedActivities || [];
this.estimatedLodgingCostPerDay = tripData.estimatedLodgingCostPerDay;
this.estimatedFlightCostPerPerson = tripData.estimatedFlightCostPerPerson;
}
estimateTripCost() {
let totalFlightCost = this.travelers * this.estimatedFlightCostPerPerson;
let totalLodgingCost = this.estimatedLodgingCostPerDay * this.duration;
let totalCost = totalFlightCost + totalLodgingCost;
let total = (totalCost * 0.1) + totalCost;
let finalCost = Number(total).toFixed(2);
return parseInt(finalCost);
}
}
export default Trip;
<file_sep># Welcome to Wanderlust Excursions!
- [Description](#description)
- [Installation](#installation)
- [How To Use The Website](#how-to-use-the-website)
- [Technologies Used](#technologies-used)
- [Future Considerations](#future-considerations)
- [Contributor](#contributor)
### Description
The goal of this project was to design a user interface with accessibility in mind, while accessing and sending data from a remote API. Object oriented programming and test driven development were utilized to ensure a properly functioning application.
### Installation
To use this application, follow these instructions:
* From your terminal, clone the repo.
`$git clone <EMAIL>:sarahlane8/Travel-Tracker.git`
* Move into the root directory of the project.
* Install the required npm dependencies.
`$npm install`
* Start the local server
`$npm start`
* You must also clone down the Travel-Tracker API for the application to function properly. Open a separate tab in your terminal.
`$git clone <EMAIL>:turingschool-examples/travel-tracker-api.git`
* Move into the root directory
* `$npm install`
* Start the local API server
* `$npm start`
* Open your browser [here](http://localhost:8080/) to be taken to the log in page
* You're ready to go!
### How To Use The Website
Log in to the website. All usernames begin with `traveler` followed by a number, which is the traveler's ID. There are 50 travelers registered with the website, so any one of them may sign in. Typing in `traveler20`, for example, will fetch Gregg's information. All travelers have the same password of `<PASSWORD>`. Usernames and passwords are case-sensitive, and a message will appear if the username and password do not meet the correct criteria.
<p align="center">
<img src="https://media.giphy.com/media/3fqVJ73tvp2ErwrKXz/giphy.gif" alt="logIn demonstration"/>
</p>
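
Under the hood, the number in the username is the traveler ID the API expects. A minimal sketch of that mapping (the helper below is hypothetical; only the endpoint comes from this repo's `apiCalls.js`, and the `name` field is assumed):

```js
// "traveler20" -> 20, then fetch that traveler's record from the local API
const getIdFromUsername = username => parseInt(username.replace('traveler', ''), 10);

fetch(`http://localhost:3001/api/v1/travelers/${getIdFromUsername('traveler20')}`)
  .then(response => response.json())
  .then(traveler => console.log(traveler.name)); // expected to log "Gregg" for traveler20
```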
The user dashboard greets the user at the top and displays their trips on the left side of the screen, separated by past trips, a current trip they may be on, upcoming trips, and any trips that are still pending an agent's approval. It also displays how much money they have spent on trips in the last calendar year, if they have traveled during that time. In the center, a request trip form exists so they may book future trips. On the right, pictures of all the destinations that Wanderlust Excursions books to are available.
<p align="center">
<img height=250px; width = 500px; alt="UserDashboard" src="https://user-images.githubusercontent.com/70901622/116112002-640e2d80-a674-11eb-8bbb-89d6d3b9549d.png">
</p>
The request trip form allows a user to choose a start date, how many days they would like to go for, how many people are going, and where they would like to go. They may start typing a location in the input field; the destination cards on the right filter down as the location becomes more specific, and the location can then be chosen from the drop-down. Clicking the estimate cost button will display how much the trip will cost, including an agent's fee.
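
For reference, the estimate mirrors `Trip.estimateTripCost()` in this repo: flights are priced per traveler, lodging per day, and a 10% agent fee is added on top. A quick sketch with made-up numbers (the import path is illustrative):

```js
import Trip from './src/Trip';

const trip = new Trip({
  id: 1, userID: 20, destinationID: 5, travelers: 2, date: '2021/06/01',
  duration: 5, estimatedLodgingCostPerDay: 100, estimatedFlightCostPerPerson: 400
});
// flights: 2 * 400 = 800, lodging: 100 * 5 = 500, subtotal 1300, + 10% fee = 1430
trip.estimateTripCost(); // 1430
```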
After they submit a trip request, the form will clear, and the pending trip will display on the user's dashboard.
<p align="center">
<img src="https://media.giphy.com/media/VvTOx6yH5Yl6Mu1SqU/giphy.gif" alt="request a trip demo"/>
</p>
Several breakpoints were utilized to ensure the website would be accessible across devices.
<p align="center">
<img src="https://media.giphy.com/media/yzeANV4boStFPKNild/giphy.gif" alt="breakpoints demonstration" />
</p>
### Technologies Used
* Mocha
* Chai
* Sass
* Fetch API
* dayjs
* Webpack
* JavaScript
### Future Considerations
* Adding an agent login and dashboard to approve pending trips
* Allowing the trip cards to be clicked on to open up more information about that trip
* Allowing a user to upload photos associated with each trip, and notes about activities they did on each trip
### Contributor
* <NAME> [GitHub Profile](https://github.com/sarahlane8)
* [Project Repository](https://github.com/sarahlane8/Travel-Tracker)
<file_sep>import domUpdates from './domUpdates.js';
import './scripts.js'
//************FETCHING ALL DATA************//
const fetchAllData = id => {
const tripsData = fetch('http://localhost:3001/api/v1/trips')
.then(response => response.json())
.then(tripsData => {
return tripsData;
});
const destinationsData = fetch('http://localhost:3001/api/v1/destinations')
.then(response => response.json())
.then(destinationsData => {
return destinationsData;
});
const singleTravelerData = fetch(`http://localhost:3001/api/v1/travelers/${id}`)
.then(response => response.json())
.then(singleTravelerData => {
return singleTravelerData;
});
return Promise.all([tripsData, destinationsData, singleTravelerData])
.then(data => {
const allData = {}
allData.tripsData = data[0]
allData.destinationsData = data[1]
allData.singleTravelerData = data[2]
return allData;
})
.catch(err => domUpdates.displayServerIsDownMessage());
}
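// Example usage (sketch): load everything needed to build one traveler's dashboard
// fetchAllData(20).then(({ tripsData, destinationsData, singleTravelerData }) => { /* render */ });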
//************FETCHING A SINGLE TRAVELER'S DATA************//
const fetchSingleTravelerData = id => {
    // return the promise so callers can use the traveler data once it resolves
    return fetch(`http://localhost:3001/api/v1/travelers/${id}`)
        .then(response => response.json())
        .then(singleTravelerData => {
            return singleTravelerData;
        })
        .catch(err => domUpdates.displayServerIsDownMessage());
};
//************FETCHING ALL TRAVELERS' DATA************//
//************ADDING A NEW TRIP FOR APPROVAL************//
const addNewTrip = tripObject => {
return fetch('http://localhost:3001/api/v1/trips', {
method: 'POST',
body: JSON.stringify(tripObject),
headers: {
'Content-Type': 'application/json'
}
})
.then(response => response.json())
.then(response => {
if (response.message === `Trip with id ${tripObject.id} successfully posted`) {
return response;
}
if (response.message === "You are missing a required parameter of destinationID") {
domUpdates.displayServerIsDownMessage();
}
})
.catch(err => domUpdates.displayServerIsDownMessage());
}
export { fetchAllData, fetchSingleTravelerData, addNewTrip };
| 1426d967a192749e13fc887a112c36e813dd5a8f | [
"JavaScript",
"Markdown"
] | 7 | JavaScript | sarahlane8/Travel-Tracker | c5f577a10af4a9742cca88cbbdac69619f42ebd2 | 8c464fa73db1fa03fdf02fed766d749daf063b6a | |
refs/heads/master | <file_sep>#ifndef MASHINE_H
#define MASHINE_H
class mashine
{
public:
mashine();
~mashine();
};
#endif // MASHINE_H
<file_sep>#include "mashine.h"
mashine::mashine()
{
}
mashine::~mashine()
{
}
<file_sep>#include "mainwindow.h"
#include <QApplication>
#include <QtCore>
#include <QVector>
#include <QPointF>
#define BIG_NUMBER 9999999
int main(int argc, char *argv[])
{
auto getRand = []()->double {
return static_cast<double>(((double)(qrand() % BIG_NUMBER)) / BIG_NUMBER);
};
auto generateDots = [&getRand](int size, const QPointF ¢er, double r) -> QVector<QPointF> {
QVector<QPointF> points;
for(int i = 0; i < size; ++i) {
double x1 = center.x() + getRand()*r;
double x2 = center.y() + getRand()*r;
            points.append(QPointF(x1, x2));
}
return points;
};
    // trains a single-layer perceptron on two interleaved classes and returns the epoch count
    // (fixed so it only uses its own parameters instead of variables from main's scope)
    auto countWeightsAndT = [](const QVector<QPointF> &pointsA, const QVector<QPointF> &pointsB, double &w1, double &w2, double &T)->double {
        QVector<QPointF> points;
        for (int i = 0; i < pointsA.size(); i++) {
            points << pointsA[i] << pointsB[i];
        }
        auto alf = static_cast<double>(0.01);
        //learning
        int curEpoch = 0;
        bool goodEpoch = false;
        while(!goodEpoch) {
            goodEpoch = true;
            for(int curPoint = 0; curPoint < points.size(); ++curPoint) {
                auto S = points.at(curPoint).x()*w1 + points.at(curPoint).y()*w2 - T;
                auto y = (S<=0) ? 0 : 1;
                // expected class label: even indices come from pointsA (0), odd from pointsB (1)
                int e;
                if (curPoint % 2 == 0) {
                    e = 0;
                } else {
                    e = 1;
                }
                int K = y - e;
                w1 = w1 - alf * points.at(curPoint).x() * (K);
                w2 = w2 - alf * points.at(curPoint).y() * (K);
                T = T + alf * (K);
                if (K != 0) {
                    goodEpoch = false;
                }
            }
            curEpoch++;
        }
        return curEpoch;
    };
//1) initiaization
qsrand(QTime::currentTime().second());
double w1 = getRand();
double w2 = getRand();
double T = getRand();
auto points1 = generateDots(20, QPointF(10, 10), 4);
auto points2 = generateDots(20, QPointF(20, 20), 5);
    auto points3 = generateDots(20, QPointF(30, 30), 5);
QVector<QPointF> points;
for (int i = 0; i < points1.size(); i++) {
points << points1[i] << points2[i];
}
auto alf = static_cast<double>(0.01);
//learning
int curEpoch = 0;
bool goodEpoch = false;
while(!goodEpoch) {
goodEpoch = true;
for(int curPoint = 0; curPoint < points.size(); ++curPoint) {
auto S = points.at(curPoint).x()*w1 + points.at(curPoint).y()*w2 - T;
auto y = (S<=0) ? 0 : 1;
int e;
if (curPoint % 2 == 0) {
e = 0;
} else {
e = 1;
}
int K = y - e;
w1 = w1 - alf * points.at(curPoint).x() * (K);
w2 = w2 - alf * points.at(curPoint).y() * (K);
T = T + alf * (K);
if (K != 0) {
goodEpoch = false;
}
}
curEpoch++;
}
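    // The learned decision boundary is the set of points where S = w1*x + w2*y - T = 0;
    // solving for y gives y = T/w2 - (w1/w2)*x, i.e. the line y = a + b*x with the
    // intercept a and slope b computed below.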
double a = T/w2;
double b = -w1/w2;
printf("w1: %lf \n w2: %lf \n T: %lf \n", w1, w2, T);
printf("a: %lf \n b: %lf \n", a, b);
printf("Current epoch: %d\n", curEpoch);
}
| 36e198a9690db732a26da9e994e81fbfcd08252b | [
"C++"
] | 3 | C++ | lexgolubev/MashineLearining1 | 5e2a8b96149c957c1ed4ff35636eb686da03da14 | 9a8492e97f52b9d9a3df98a395d85a169c0f4392 | |
refs/heads/master | <file_sep>const fn get_flatc_name() -> &'static str {
#[cfg(target_os = "windows")]
return "flatc.exe";
#[cfg(target_os = "macos")]
return "flatc_osx";
#[cfg(not(any(target_os = "windows", target_os = "macos")))]
return "flatc_linux";
}
fn main() {
let mut path = std::env::current_dir().unwrap();
path.push("..");
path.push(get_flatc_name());
println!("cargo:rustc-env=FLATC={}", path.display());
}
<file_sep>[package]
name = "flatbuffers-run"
version = "0.1.0"
authors = ["<NAME> <<EMAIL>>"]
edition = "2018"
[dependencies]
bitflags = "1.2.1"<file_sep>[package]
name = "flatbuffers_builder"
version = "0.1.0"
authors = ["<NAME> <<EMAIL>>"]
edition = "2018"
[dependencies]
flatbuffers-run = { path = "../flatbuffers-run" }
flatbuffers = "0.6.0"
[build-dependencies]
flatbuffers-run = { path = "../flatbuffers-run" }
<file_sep>[package]
name = "flatbuffers-build"
version = "0.1.0"
authors = ["<NAME> <<EMAIL>>"]
edition = "2018"
[dependencies]
flatbuffers_builder = { path = "./flatbuffers-builder" }
[workspace]
members = [
"flatbuffers-builder",
"flatbuffers-run",
]<file_sep>extern crate flatbuffers_run;
fn main() {
let out_dir = std::env::current_dir().unwrap();
if out_dir.join("reflection_generated.rs").exists() {
return;
}
if let Ok(src_path) = std::env::var("FBS_REFLECTION_SRC") {
let mut runner = flatbuffers_run::Runner::new();
runner
.rust(true)
.add_definition(&src_path)
.out_dir(&out_dir);
runner.compile().expect("Failed to compile fbs reflection");
} else {
panic!("Faild to acquire reflection source. Specify this with FBS_REFLECTION_SRC env var");
}
}
<file_sep>#[path = "../reflection_generated.rs"]
pub mod fbs_schema;
use std::ffi::OsStr;
pub trait ServiceGenerator {
fn write_service<'a>(
&mut self,
writer: &mut dyn std::io::Write,
schema: fbs_schema::reflection::Schema<'a>,
) -> std::io::Result<()>;
}
pub struct Builder {
runner: flatbuffers_run::Runner,
generator: Option<Box<dyn ServiceGenerator>>,
}
impl Builder {
pub fn new<T: ?Sized + AsRef<OsStr>>(out_dir: &T) -> Self {
Builder {
runner: {
let mut runner = flatbuffers_run::Runner::new();
runner.out_dir(out_dir);
runner
},
generator: None,
}
}
pub fn add_definition<T: ?Sized + AsRef<OsStr>>(&mut self, definition: &T) -> &mut Self {
self.runner.add_definition(definition);
self
}
pub fn add_definitions<T: ?Sized + AsRef<OsStr>>(&mut self, definitions: &[&T]) -> &mut Self {
self.runner.add_definitions(definitions);
self
}
pub fn add_include<T: ?Sized + AsRef<OsStr>>(&mut self, include: &T) -> &mut Self {
self.runner.add_include(include);
self
}
pub fn add_includes<T: ?Sized + AsRef<OsStr>>(&mut self, includes: &[&T]) -> &mut Self {
self.runner.add_includes(includes);
self
}
pub fn generator(&mut self, generator: Box<dyn ServiceGenerator>) -> &mut Self {
self.generator = Some(generator);
self
}
pub fn generate(mut self) -> std::io::Result<()> {
self.runner.rust(true);
if self.generator.is_some() {
self.runner.schema(true);
}
for def in self.runner.get_definitions() {
println!("cargo:rerun-if-changed={}", def.to_str().unwrap());
}
let generateds = self.runner.compile()?;
if self.generator.is_none() {
return Ok(());
}
let mut generator = self.generator.unwrap();
let mut schema_buffer: Vec<u8> = Default::default();
for item in generateds {
use std::fs::{File, OpenOptions};
use std::io::{Read, Write};
let schema_path = item.schema.unwrap();
            let schema = {
                // clear the shared buffer so bytes from a previous schema don't leak into this parse
                schema_buffer.clear();
                let mut schema = File::open(schema_path.clone())?;
                schema.read_to_end(&mut schema_buffer)?;
                fbs_schema::reflection::get_root_as_schema(&schema_buffer)
            };
if schema.services().is_none() {
continue;
}
let mut src = {
let src_path = item.rust.unwrap();
let mut src = OpenOptions::new();
src.append(true).open(src_path)?
};
writeln!(src)?;
generator.write_service(&mut src, schema)?;
src.flush()?;
drop(src);
std::fs::remove_file(schema_path)?;
}
Ok(())
}
}
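
// Example usage (sketch) from a dependent crate's build.rs; the schema path is hypothetical:
//
//     let out_dir = std::env::var("OUT_DIR").unwrap();
//     let mut builder = flatbuffers_builder::Builder::new(&out_dir);
//     builder.add_definition("schemas/service.fbs");
//     builder.generate().expect("flatbuffers generation failed");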
<file_sep>extern crate bitflags;
use bitflags::bitflags;
use std::ffi::OsStr;
use std::path::PathBuf;
const FLATC_PATH: &'static str = env!("FLATC");
bitflags! {
pub struct CompileFlags: u32 {
const RUST = 0b00000001;
const SCHEMA = 0b00000010;
}
}
pub struct Runner {
flags: CompileFlags,
definitions: Vec<PathBuf>,
includes: Vec<PathBuf>,
out_dir: Option<PathBuf>,
}
pub struct CompileResult {
pub rust: Option<PathBuf>,
pub schema: Option<PathBuf>,
}
impl Runner {
pub fn new() -> Self {
Runner {
flags: CompileFlags::empty(),
definitions: Default::default(),
includes: Default::default(),
out_dir: None,
}
}
pub fn get_definitions(&self) -> &[PathBuf] {
self.definitions.as_slice()
}
pub fn add_definition<T: ?Sized + AsRef<OsStr>>(&mut self, definition: &T) -> &mut Self {
self.definitions.push(PathBuf::from(definition));
self
}
pub fn add_definitions<T: ?Sized + AsRef<OsStr>>(&mut self, definitions: &[&T]) -> &mut Self {
self.definitions
.extend(definitions.iter().map(|s| PathBuf::from(*s)));
self
}
pub fn add_include<T: ?Sized + AsRef<OsStr>>(&mut self, include: &T) -> &mut Self {
self.includes.push(PathBuf::from(include));
self
}
pub fn add_includes<T: ?Sized + AsRef<OsStr>>(&mut self, includes: &[&T]) -> &mut Self {
self.includes
.extend(includes.iter().map(|s| PathBuf::from(*s)));
self
}
pub fn out_dir<T: ?Sized + AsRef<OsStr>>(&mut self, dir: &T) -> &mut Self {
self.out_dir = Some(PathBuf::from(dir));
self
}
pub fn rust(&mut self, enable: bool) -> &mut Self {
self.flags.set(CompileFlags::RUST, enable);
self
}
pub fn schema(&mut self, enable: bool) -> &mut Self {
self.flags.set(CompileFlags::SCHEMA, enable);
self
}
pub fn compile(self) -> std::io::Result<Vec<CompileResult>> {
use std::process::Command;
let mut command = Command::new(FLATC_PATH);
let gen_rust = self.flags.contains(CompileFlags::RUST);
if gen_rust {
command.arg("--rust");
}
let gen_schema = self.flags.contains(CompileFlags::SCHEMA);
if gen_schema {
command.args(&["--schema", "--binary", "--bfbs-comments", "--bfbs-builtins"]);
}
let out_dir = if let Some(out_dir) = self.out_dir {
command.arg("-o");
command.arg(&out_dir);
out_dir
} else {
std::env::current_dir().unwrap().to_owned()
};
        // forward any registered include directories to flatc via its -I flag
        for include in &self.includes {
            command.arg("-I");
            command.arg(include);
        }
        for definition in &self.definitions {
            command.arg(definition);
        }
let mut child = command.spawn()?;
child.wait()?;
Ok(self
.definitions
.iter()
.map(|def| {
let file_name = std::path::Path::new(&def).file_stem().unwrap();
let file_name = file_name.to_str().unwrap();
CompileResult {
rust: if gen_rust {
Some(out_dir.join(format!("{}_generated.rs", file_name)))
} else {
None
},
schema: if gen_schema {
Some(out_dir.join(format!("{}.bfbs", file_name)))
} else {
None
},
}
})
.collect())
}
}
<file_sep>extern crate flatbuffers_builder;
pub use flatbuffers_builder::*;
| 3d135869a53a1b68614ffd3e127b19711d225528 | [
"TOML",
"Rust"
] | 8 | Rust | Perlmint/flatbuffers-build | 87409744fbd89c4142be92cc270dbd2c0948f60a | 3c647f3e08c6020a2cfb38d9d462be21508d0fd3 | |
refs/heads/master | <repo_name>gnlin/faceRecFW<file_sep>/Sources/fisherldautils.cpp
/* Modified version of Fisher LDA Alogrithm, <NAME> */
#include "fisherldautils.h"
#include "pcautils.h"
void fisher_lda_utils::flda_transfer(std::vector<cv::Mat> &pca_feature_list,
std::vector<cv::Mat> &flda_feature_list,
cv::Mat &flda_transfer_matrix,
int sample_class_number, int sample_per_class,
int dim_ref)
{
/* make sure the input has valid pca feature input, which makes sure every */
/* thing else possible. */
assert(pca_feature_list.size() != 0 && pca_feature_list[0].cols == 1);
std::vector<cv::Mat> mk_matrix;
/* Sb, Between-calss scatter matrix, describles the dispersion between each */
/* class and the total sample set. */
cv::Mat Sb;
/* Sw, Within-class scatter matrix, describles the dispersion within one class's */
/* sample set. */
cv::Mat Sw;
/* calculate mean vectors of each sample set */
calc_mean_vectors(pca_feature_list, mk_matrix, sample_class_number, sample_per_class);
/* calculate Sb, the Between-calss scatter matrix */
calc_between_class_scatter_matrix(Sb, mk_matrix, pca_feature_list, sample_class_number);
/* calculate Sw, the Within-class scatter matrix */
calc_within_class_scatter_matrix(Sw, mk_matrix, pca_feature_list, sample_class_number, sample_per_class);
/* calculate transfer matrix */
calc_transfer_matrix(Sb, Sw, flda_transfer_matrix, dim_ref, sample_class_number);
/* claculate flda features */
for (size_t i = 0; i < pca_feature_list.size(); i++) {
cv::Mat flda_feature;
flda_feature = flda_transfer_matrix * pca_feature_list[i];
flda_feature_list.push_back(flda_feature);
}
}
void fisher_lda_utils::calc_transfer_matrix(cv::Mat &Sb, cv::Mat &Sw,
cv::Mat &flda_transfer_matrix, int dim_ref,
int sample_class_number)
{
cv::Mat Sw_inverse;
cv::Mat SwSb;
cv::Mat eigen_vector_matrix;
cv::Mat eigen_value_matrix;
cv::invert(Sw, Sw_inverse, cv::DECOMP_SVD);
SwSb = Sw_inverse * Sb;
cv::eigen(SwSb, eigen_value_matrix, eigen_vector_matrix);
int range = (sample_class_number < dim_ref) ? sample_class_number : dim_ref;
/* range selection algorithm */
// 1. if (range < 2) range = 2;
if (range < dim_ref) range = 2 * range;
flda_transfer_matrix.create(range, eigen_vector_matrix.cols, CV_64FC1);
for (int y = 0; y < range; y++) {
for (int x = 0; x < eigen_vector_matrix.cols; x++) {
flda_transfer_matrix.at<double>(y, x) =
eigen_vector_matrix.at<double>(y, x);
}
}
}
void fisher_lda_utils::calc_between_class_scatter_matrix(cv::Mat &Sb,
std::vector<cv::Mat> &mk_matrix,
std::vector<cv::Mat> &pca_feature_list,
int sample_class_number)
{
int feature_len = pca_feature_list[0].rows;
/* create matrix Sb */
Sb.create(feature_len, feature_len, CV_64FC1);
Sb.setTo(cv::Scalar(0.0));
/* mean matrix of total sample set */
cv::Mat mean_total;
mean_sample_set(pca_feature_list, mean_total);
/* calculate matrix Sb */
for (int i = 0; i < sample_class_number; i++) {
cv::Mat diff_vec, diff_vec_t;
cv::Mat covariance;
cv::subtract(mk_matrix[i], mean_total, diff_vec);
cv::transpose(diff_vec, diff_vec_t);
covariance = diff_vec * diff_vec_t;
cv::add(Sb, covariance, Sb);
}
for (int x = 0; x < Sb.cols; x++) {
for (int y = 0; y < Sb.rows; y++) {
Sb.at<double>(y, x) = Sb.at<double>(y, x) / sample_class_number;
}
}
}
void fisher_lda_utils::calc_within_class_scatter_matrix(cv::Mat &Sw, std::vector<cv::Mat> &mk_matrix,
std::vector<cv::Mat> &pca_feature_list,
int sample_class_number, int sample_per_class)
{
int feature_len = pca_feature_list[0].rows;
/* create matrix Sw */
Sw.create(feature_len, feature_len, CV_64FC1);
Sw.setTo(cv::Scalar(0.0));
for (int i = 0; i < sample_class_number; i++) {
for (int j = 0; j < sample_per_class; j++) {
cv::Mat diff_vec, diff_vec_t;
cv::Mat covariance;
cv::subtract(mk_matrix[i],
pca_feature_list[ (i * sample_per_class) + j ],
diff_vec);
cv::transpose(diff_vec, diff_vec_t);
covariance = diff_vec * diff_vec_t;
cv::add(Sw, covariance, Sw);
}
}
for (int x = 0; x < Sw.cols; x++) {
for (int y = 0; y < Sw.rows; y++) {
Sw.at<double>(y, x) =
Sw.at<double>(y, x) / (sample_class_number * sample_per_class);
}
}
}
void fisher_lda_utils::calc_mean_vectors(std::vector<cv::Mat> &pca_feature_list,
std::vector<cv::Mat> &mk_matrix,
int sample_class_number, int sample_per_class)
{
int feature_len = pca_feature_list[0].rows;
std::vector<cv::Mat> class_storage;
/* a set of vectors to storage samples of each class at a time*/
for (int i = 0; i < sample_per_class; i++) {
cv::Mat local_matrix;
local_matrix.create(feature_len, 1, CV_64FC1);
class_storage.push_back(local_matrix);
}
/* M1, M2, ..., Mk, mean vector of each class's samples */
for (int i = 0; i < sample_class_number; i++) {
cv::Mat Mk;
for (int j = 0; j < sample_per_class; j++) {
pca_feature_list[ (i * sample_per_class) + j ]
.copyTo(class_storage[j]);
}
mean_sample_set(class_storage, Mk);
mk_matrix.push_back(Mk);
}
}
void fisher_lda_utils::mean_sample_set(std::vector<cv::Mat> &sample_set, cv::Mat &matrix_mean)
{
assert(!sample_set.empty());
int i, x, y;
int sample_size = sample_set.size();
matrix_mean.create(sample_set[0].rows, sample_set[0].cols, sample_set[0].type());
matrix_mean.setTo(cv::Scalar(0.0));
for (i = 0; i < sample_size; i++) {
cv::add(matrix_mean, sample_set[i], matrix_mean);
}
for (x = 0; x < matrix_mean.cols; x++) {
for (y = 0; y < matrix_mean.rows; y++) {
matrix_mean.at<double>(y, x) =
matrix_mean.at<double>(y, x) / sample_size;
}
}
}
<file_sep>/Headers/fisherldautils.h
#ifndef FISHERLDAUTILS_H
#define FISHERLDAUTILS_H
#include "common.h"
namespace fisher_lda_utils {
void flda_transfer(std::vector<cv::Mat> &pca_feature_list,
std::vector<cv::Mat> &flda_feature_list,
cv::Mat &flda_transfer_matrix,
int sample_class_number,
int sample_per_class,
int dim_ref);
void calc_mean_vectors(std::vector<cv::Mat> &pca_feature_list,
std::vector<cv::Mat> &mk_matrix,
int sample_class_number, int sample_per_class);
void calc_between_class_scatter_matrix(cv::Mat &Sb, std::vector<cv::Mat> &mk_matrix,
std::vector<cv::Mat> &pca_feature_list,
int sample_class_number);
void calc_within_class_scatter_matrix(cv::Mat &Sw, std::vector<cv::Mat> &mk_matrix,
std::vector<cv::Mat> &pca_feature_list,
int sample_class_number, int sample_per_class);
void mean_sample_set(std::vector<cv::Mat> &sample_set, cv::Mat &matrix_mean);
void calc_transfer_matrix(cv::Mat &Sb, cv::Mat &Sw,
cv::Mat &flda_transfer_matrix, int dim_ref,
int sample_class_number);
}
#endif // FISHERLDAUTILS_H
<file_sep>/README.md
# faceRecFW
A face recognition program that implements basic human face detection and recognition function.
# Description
I wrote this program two years ago and I did not touch the code since then. The program runs
well and all functions are okay, except a minor bug in facedetector.cpp, which will cause
program crashes, but it happens very rare. I'll re-arrange the code when I have time, and
probably improve the algorithm a bit.
| ae5f7fe223592e97e7f12551d75a465a23b12528 | [
"Markdown",
"C++"
] | 3 | C++ | gnlin/faceRecFW | bc1c1ce7e75f2f987b8f3d5b137256b896a87135 | 2d94a18acf7d803a40d23fdc779a1c52137ac7ba | |
refs/heads/master | <repo_name>Kanakku/Sistemas-Operativos-I<file_sep>/nbproject/private/private.properties
compile.on.save=true
user.properties.file=/home/byron/.netbeans/8.2/build.properties
<file_sep>/src/cliente/servidor/Server.java
package cliente.servidor;
import UI_CliSer.*;
import java.net.*;
import java.io.*;
import java.util.*;
import javax.swing.*;
public class Server extends Thread{
private ServerSocket serverSocket;
    //Linked list of connected clients.
LinkedList<CliHilo> clientes;
private final UI_Servidor test;
private final String puerto;
public Server(String puerto, UI_Servidor test) {
this.puerto=puerto;
this.test=test;
clientes=new LinkedList<>();
this.start();
}
    //Keeps the server open.
public void run() {
try {
serverSocket = new ServerSocket(Integer.valueOf(puerto));
test.addServidorIniciado();
while (true) {
CliHilo hilo;
Socket socket;
socket = serverSocket.accept();
System.out.println("Nueva conexion: "+socket);
hilo=new CliHilo(socket, this);
hilo.start();
}
} catch (Exception e) {
JOptionPane.showMessageDialog(test, "ERROR al iniciar servidor.");
System.exit(0);
}
}
    //List with the ids of all connected clients.
LinkedList<String> getUsuariosConectados() {
LinkedList<String>usuariosConectados=new LinkedList<>();
clientes.stream().forEach(con -> usuariosConectados.add(con.getIdentificador()));
return usuariosConectados;
}
void agregarLog(String texto) {
test.agregarLog(texto);
}
}
<file_sep>/src/cliente/servidor/CliHilo.java
package cliente.servidor;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.net.Socket;
import java.util.LinkedList;
public class CliHilo extends Thread{
private final Socket socket;
private final Server server;
private String idcliente;
private boolean conectado;
    //Object streams for the socket.
private ObjectOutputStream objectOutputStream;
private ObjectInputStream objectInputStream;
public CliHilo(Socket socket,Server server) {
this.server=server;
this.socket = socket;
try {
objectOutputStream = new ObjectOutputStream(socket.getOutputStream());
objectInputStream = new ObjectInputStream(socket.getInputStream());
} catch (IOException ex) {
System.err.println("Error en la inicialización del ObjectOutputStream y el ObjectInputStream");
}
}
public void desconectar() {
try {
socket.close();
conectado=false;
} catch (IOException ex) {
System.err.println("Error al terminar comunicacion con el cliente.");
}
}
public void conectado(){
conectado=true;
while(conectado){
try {
Object aux=objectInputStream.readObject();
if(aux instanceof LinkedList){
ejecutar((LinkedList<String>)aux);
}
} catch (Exception e) {
System.err.println("Error al leer lo enviado por el cliente.");
}
}
}
public void run() {
try{
conectado();
} catch (Exception ex) {
System.err.println("Error llamar el hilo del cliente.");
}
desconectar();
}
    //Handles whatever is received over the socket: each message is a LinkedList<String>
    //whose first element is its type ("Entrando conexion", "Entrando desconexion", "Mensaje").
public void ejecutar(LinkedList<String> lista){
String tipo=lista.get(0);
switch (tipo) {
case "Entrando conexion":
confirmarConexion(lista.get(1));
break;
case "Entrando desconexion":
confirmarDesConexion();
break;
case "Mensaje":
String destinatario=lista.get(2);
server.clientes.stream().filter(h -> (destinatario.equals(h.getIdentificador())))
.forEach((h) -> h.enviarMensaje(lista));
break;
default:
break;
}
}
    //Sends the message through the socket.
private void enviarMensaje(LinkedList<String> lista){
try {
objectOutputStream.writeObject(lista);
} catch (Exception e) {
System.err.println("Error al enviar el objeto al cliente.");
}
}
    //New client connected, add it as a contact.
private void confirmarConexion(String identificador) {
this.idcliente=identificador;
LinkedList<String> lista=new LinkedList<>();
lista.add("Se acepto conexion");
lista.add(this.idcliente);
lista.addAll(server.getUsuariosConectados());
enviarMensaje(lista);
server.agregarLog("\nNuevo cliente: "+this.idcliente);
LinkedList<String> auxLista=new LinkedList<>();
auxLista.add("Usuario conectado");
auxLista.add(this.idcliente);
server.clientes.stream().forEach(cliente -> cliente.enviarMensaje(auxLista));
server.clientes.add(this);
}
    //Client disconnected, remove the contact.
private void confirmarDesConexion() {
LinkedList<String> auxLista=new LinkedList<>();
auxLista.add("Usuario desconectado");
auxLista.add(this.idcliente);
server.agregarLog("\nEl cliente \""+this.idcliente+"\" se ha desconectado.");
this.desconectar();
for(int i=0;i<server.clientes.size();i++){
if(server.clientes.get(i).equals(this)){
server.clientes.remove(i);
break;
}
}
server.clientes.stream().forEach(h -> h.enviarMensaje(auxLista));
}
    //Client id in the chat
public String getIdentificador() {
return idcliente;
}
}
| 5997fbaf2eb37dc0c94a1212d21a74edb25e3a52 | [
"Java",
"INI"
] | 3 | INI | Kanakku/Sistemas-Operativos-I | ceabaf758fda62034af262b19f56d59ddbc50923 | 21263a6353ed250f06e8a1ba72fc04a39b0e4701 | |
refs/heads/main | <repo_name>MinhPhu1999/tlcn_mern<file_sep>/api/routers/cart.router.js
const cart_controller = require('../controllers/cart.controller');
const auth = require('../utils/auth');
module.exports = app => {
app.route('/cart/:id_user').get(auth.authLogin, cart_controller.getCart);
app.route('/cart/get/all').get(cart_controller.getAll);
app.route('/cart/addcart').post(auth.authLogin, cart_controller.addToCart);
app.route('/cart/updatetang').put(auth.authLogin, cart_controller.updateTang);
app.route('/cart/updategiam').put(auth.authLogin, cart_controller.updateGiam);
app.route('/cart/delete/:id_user').delete(auth.authLogin, cart_controller.deleteCart);
app.route('/cart/remove').put(auth.authLogin, cart_controller.deleteProductInCart);
};
<file_sep>/api/models/color_product.js
const mongoose = require('mongoose');
const Schema = mongoose.Schema;
const colorProductSchema = new mongoose.Schema({
colorProduct: [
{
_id: { type: Schema.Types.ObjectId, ref: 'color' },
},
],
products: { type: Schema.Types.ObjectId, ref: 'product' },
});
module.exports = mongoose.model('colorproduct', colorProductSchema);
<file_sep>/api/models/bill.model.js
const mongoose = require('mongoose');
const Schema = mongoose.Schema;
const order = new Schema(
{
id_user: {
type: String,
},
cart: {
type: [
{
name: String,
price: Number,
id_category: String,
image: String,
id_brand: String,
count: Number,
},
],
required: true,
},
order_status: {
type: Boolean,
required: true,
},
order_subtotal: {
type: Number,
required: true,
},
order_date: {
type: Date,
$dateToString: { format: '%Y-%m-%d', date: '$date' },
            default: Date.now, // use the function so the default is evaluated per document, not once at module load
},
shipping_address: {
type: [
{
city: String,
country: String,
posteCode: Number,
number: String,
phone: String,
address: String,
},
],
required: true,
},
// status:{
// type:Boolean
// }
},
{ timestamps: true },
);
module.exports = mongoose.model('bill', order);
<file_sep>/api/controllers/product.controller.js
const product = require('../models/product.model');
const order = require('../models/order.model');
const brandController = require('../controllers/brand.controller');
const categoryController = require('../controllers/category.controller');
const { performance } = require('perf_hooks');
exports.sortProduct = async (req, res) => {
    //declare the variables we need
const sapXep = req.params.inc;
const listProduct = await product.find({ status: true });
const sortListProduct = listProduct.sort(function (a, b) {
        if (sapXep == 'increase') return parseFloat(a.price) - parseFloat(b.price); //sort products by ascending price
        return parseFloat(b.price) - parseFloat(a.price); //sort products by descending price
});
sortListProduct
? res.status(200).send(sortListProduct)
: res.status(404).send({ message: 'products not found' });
};
exports.getProductByID = async (req, res) => {
let productFind;
try {
productFind = await product.findOne({ _id: req.params.id });
} catch (err) {
return res.status(500).send({ message: 'Fail' });
}
productFind
? res.status(200).send(productFind)
: res.status(404).send({ message: 'product not found' });
};
exports.getOne = async (req, res) => {
try {
const productFind = await product
.findOne({ _id: req.params.id })
.populate('id_category')
.populate({
path: 'id_category',
select: 'name',
})
.populate('id_brand')
.populate({
path: 'id_brand',
select: 'name',
})
.populate('colorProducts')
.populate({
path: 'colorProducts',
populate: {
path: 'colorProduct',
populate: {
path: '_id',
select: 'name',
},
},
})
.populate('sizeProducts')
.populate({
path: 'sizeProducts',
populate: {
path: 'sizeProduct',
populate: {
path: '_id',
select: 'name',
},
},
});
if (productFind) {
return res.status(200).json(productFind);
} else {
return res.status(404).json({ message: 'Fail' });
}
} catch (err) {
return res.status(404).json({ message: err });
}
};
exports.getProducts = async (req, res) => {
if (typeof req.params.page === 'undefined') {
return res.status(402).send({ message: 'Data invalid' });
}
let count;
try {
        count = await product.countDocuments({ status: true }); // count how many products there are
} catch (err) {
return res.status(500).send({ message: 'fail' });
}
    const totalPage = parseInt((count - 1) / 8 + 1); // work out the number of pages from the product count
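    // e.g. with 8 products per page: count = 8 -> totalPage = 1, count = 9 -> totalPage = 2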
const { page } = req.params;
if (parseInt(page) < 1 || parseInt(page) > totalPage) {
return res.status(200).send({ data: [], message: 'Invalid page', totalPage });
}
product
.find({ status: true })
.populate('colorProducts')
.populate({
path: 'colorProducts',
populate: {
path: 'colorProduct',
populate: {
path: '_id',
},
},
})
.populate('sizeProducts')
.populate({
path: 'sizeProducts',
populate: {
path: 'sizeProduct',
populate: {
path: '_id',
},
},
})
.skip(8 * (parseInt(page) - 1))
        .limit(8) // show at most 8 products per page
.exec((err, docs) => {
err
? res.status(500).send({ message: err })
: res.status(200).send({ data: docs, totalPage });
});
};
exports.reView = async (req, res) => {
try {
const { rating } = req.body;
if (rating && rating !== 0) {
const productFind = await product.findById(req.params.id);
if (!productFind) return res.status(400).json({ msg: 'Product does not exist.' });
// let num = productFind.numReviews;
// let rate = productFind.rating;
let num = 0;
let rate = 0;
if (productFind.numReviews) {
num = productFind.numReviews;
}
if (productFind.rating) {
rate = productFind.rating;
}
await product.findOneAndUpdate(
{ _id: req.params.id },
{
$set: {
rating: rate + rating,
numReviews: num + 1,
},
},
);
res.json({ msg: 'Update success' });
}
} catch (err) {
return res.status(500).json({ msg: err.message });
}
};
exports.getAllProduct = async (req, res) => {
const productFind = await product.find({ status: true });
productFind
? res.status(200).send(productFind)
: res.status(404).send({ message: 'products not found' });
};
const fullTextSearch = require('fulltextsearch');
var fullTextSearchVi = fullTextSearch.vi;
function escapeRegex(text) {
if (text.indexOf(' ') != -1) return text.split(' ');
return text.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, ' ');
}
exports.searchProduct = async (req, res) => {
if (typeof req.query.name === 'undefined') {
return res.status(402).send({ message: 'Data invalid' });
}
let regex = new RegExp(fullTextSearchVi(req.query.name), 'i');
console.log(regex);
product
.find({
$or: [{ name: { $in: [regex] }, status: true }],
})
.exec((err, docs) => {
err
? res.status(404).send({ message: 'products not found' })
: res.send({ data: docs });
});
// let regex1;
// if (Array.isArray(escapeRegex(req.query.name))) {
// regex1 = new RegExp('.' + escapeRegex(req.query.name)[1], 'i');
// } else {
// regex1 = new RegExp('.' + escapeRegex(req.query.name), 'i');
// }
// const regex2 = new RegExp('^' + escapeRegex(req.query.name), 'i');
// product
// .find({
// $or: [{ name: { $in: [regex1, regex2] }, status: true }],
// })
// .exec((err, docs) => {
// err
// ? res.status(404).send({ message: 'products not found' })
// : res.send({ data: docs });
// });
};
exports.getProductByBrand = async (req, res) => {
if (typeof req.params.brand === 'undefined') {
return res.status(402).send({ message: 'Data invalid' });
}
const searchIDBrand = await brandController.getIDBySearchText(req.params.brand);
let productFind;
try {
productFind = await product.find({
$or: [{ id_brand: new RegExp(searchIDBrand, 'i') }],
});
} catch (err) {
return res.status(500).send({ message: 'products not found' });
}
productFind
? res.status(200).send(productFind)
: res.status(404).send({ message: 'products not found' });
};
exports.getProductByCategory = async (req, res) => {
if (typeof req.body.categoryName === 'undefined' || typeof req.body.disCount === 'undefined') {
return res.status(402).send({ message: 'Data invalid' });
}
const t0 = performance.now();
const { categoryName, disCount, startDate, endDate } = req.body;
const searchIDCategory = await categoryController.getIDBySearchText(categoryName);
let productFind;
try {
productFind = await product.find({
id_category: searchIDCategory,
});
} catch (err) {
return res.status(500).send({ message: 'products not found' });
}
for (let i in productFind) {
product
.updateOne(
{ _id: productFind[i]._id },
{
$set: {
startDate: new Date(startDate),
endDate: new Date(endDate),
disCount: disCount,
},
},
{ upsert: true },
)
.then(err => {
// if(err) console.log('');
});
}
let productFind1 = await product.find({
id_category: searchIDCategory,
// $or: [{ id_category: new RegExp(searchIDCatefory, 'i') }],
});
// const t1 = performance.now();
// console.log(t1 - t0);
res.status(200).send({ productFind1 });
};
exports.updatePriceByCategory = async (req, res) => {
if (
typeof req.body.categoryName === 'undefined' ||
typeof req.body.disCount === 'undefined' ||
typeof req.body.increase === 'undefined'
) {
return res.status(402).send({ message: 'Data invalid' });
}
const { categoryName, disCount, increase } = req.body;
let discount = disCount;
let searchIDCatefory = await categoryController.getIDBySearchText(categoryName, res);
let productFind;
try {
productFind = await product.find({
id_category: searchIDCatefory,
// $or: [{ id_category: new RegExp(searchIDCatefory, 'i') }],
});
} catch (err) {
return res.status(500).send({ message: 'products not found' });
}
    if (increase === false) {
        // use the mutable copy declared above: disCount is a const from destructuring
        discount = -discount;
    }
for (let i in productFind) {
product
.updateOne(
{ _id: productFind[i]._id },
{
$set: {
                        price: productFind[i].price + (productFind[i].price * discount) / 100,
},
},
{ upsert: true },
)
.then(err => {
// console.log(err);
});
}
let productFind1 = await product.find({
id_category: searchIDCatefory,
// $or: [{ id_category: new RegExp(searchIDCatefory, 'i') }],
});
res.status(200).send({ productFind1 });
};
exports.getNameByID = async (req, res) => {
if (typeof req.params.id === 'undefined') {
return res.status(422).send({ message: 'Invalid data' });
}
let result;
try {
result = await product.findOne({ _id: req.params.id });
} catch (err) {
return res.status(404).send({ message: 'product not found' });
}
result
? res.status(200).send({ name: result.name })
: res.status(500).send({ message: 'fail' });
};
exports.getProductTop10 = async (req, res) => {
const orderFind = await order.find({ paymentStatus: 'paid' });
if (orderFind === null) {
return res.status(404).send({ message: 'products not found' });
}
let len = orderFind.length;
let productFind;
let arrProduct = [];
let arr = [];
    //collect the product ids from the orders
for (let i = 0; i < len; i++) {
let lenP = orderFind[i].cart.length;
for (let j = 0; j < lenP; j++) {
arr.push(orderFind[i].cart[j].id);
}
}
    //keep only one of each duplicated element in the array
arr = [...new Set(arr)];
    //look up the product info for each id
for (let id_product of arr) {
productFind = await product.findById(id_product);
arrProduct.push(productFind);
}
res.status(200).json({
data: arrProduct.length > 10 ? arrProduct.slice(0, 10) : arrProduct,
});
};
exports.getProductCategory = async (req, res) => {
    // check that the required query parameter was passed
if (typeof req.query.page === 'undefined') {
return res.status(402).send({ message: 'Data invalid' });
}
let count = null;
try {
        count = await product.countDocuments({ id_category: req.query.id, status: true }); // count how many products there are
} catch (err) {
return res.status(500).send({ message: err });
}
    const totalPage = parseInt((count - 1) / 8 + 1); // work out the number of pages from the product count
const { page } = req.query;
if (parseInt(page) < 1 || parseInt(page) > totalPage) {
return res.status(200).send({ data: [], message: 'Invalid page', totalPage });
}
product
.find({ id_category: req.query.id, status: true })
.populate('colorProducts')
.populate({
path: 'colorProducts',
populate: {
path: 'colorProduct',
populate: {
path: '_id',
},
},
})
.populate('sizeProducts')
.populate({
path: 'sizeProducts',
populate: {
path: 'sizeProduct',
populate: {
path: '_id',
},
},
})
.skip(8 * (parseInt(page) - 1))
        .limit(8) // show at most 8 products per page
.exec((err, docs) => {
err
? res.status(404).send({ message: err })
: res.status(200).send({ data: docs, totalPage });
});
};
<file_sep>/api/models/user.model.js
require('dotenv').config();
const mongoose = require('mongoose');
const jwt = require('jsonwebtoken');
const Schema = mongoose.Schema;
const user = new Schema(
{
is_admin: {
type: Boolean,
default: false,
},
name: {
type: String,
},
email: {
type: String,
lowercase: true,
match: [/\S+@\S+\.\S+/, 'is invalid'],
},
address: {
type: {
name: String,
phone: String,
city: String,
district: String,
ward: String,
infoDetail: String,
},
},
fbEmail: {
type: String,
},
ggEmail: {
type: String,
},
password: {
type: String,
},
is_verify: {
type: Boolean,
default: false,
},
token: {
type: String,
},
otp: {
type: String,
},
status: {
type: Boolean,
default: true,
},
},
{ timestamps: true },
);
user.methods.generateJWT = async function () {
const user = this;
const token = jwt.sign({ _id: user._id }, process.env.JWT_KEY, {
expiresIn: '3h',
});
user.token = token;
await user.save();
return token;
};
module.exports = mongoose.model('user', user);
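
// Example (sketch): issue a fresh token for a user document fetched elsewhere
// const token = await userDoc.generateJWT();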
<file_sep>/api/utils/auth.js
const user = require('../models/user.model');
const jwt = require('jsonwebtoken');
require('dotenv').config();
exports.verify = async (req, res) => {
if (typeof req.body.token === 'undefined' || typeof req.body.email === 'undefined') {
res.status(422).send({ message: 'Invalid data' });
return;
}
let token = req.body.token;
let email = req.body.email;
let userFind = await user.findOne({ email: email });
try {
let decoded = await jwt.verify(token, process.env.JWT_KEY);
if (decoded._id == userFind._id) {
res.status(200).send({ message: 'success' });
return;
}
} catch (err) {
res.status(404).send({ message: 'unsuccess1' });
return;
}
res.status(404).send({ message: 'unsuccess' });
};
exports.authLogin = (req, res, next) => {
try {
const token = req.header('Authorization').replace('Bearer ', '');
const data = jwt.verify(token, process.env.JWT_KEY);
user.findOne({ _id: data._id, token: token })
.then(user => {
if (!user) {
return res.status(401).send({
message: 'Please login',
});
}
req.user = user;
next();
})
.catch(err => {
return res.status(401).send({
message: 'Not authorized to access this resource',
});
});
} catch (error) {
res.status(401).send({ message: 'Time out, Please login again' });
}
};
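
// Example (sketch): protected routes mount authLogin and expect the JWT issued at login
// in the request header, e.g.  Authorization: Bearer <token returned by the login controller>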
<file_sep>/api/controllers/user.controller.js
const user = require('../models/user.model');
const nodemailer = require('../utils/nodemailer');
const { OAuth2Client } = require('google-auth-library');
const fetch = require('node-fetch');
const jwt = require('jsonwebtoken');
const bcrypt = require('bcrypt');
const maotp = require('../utils/otp');
const validate = require('../utils/validate');
require('dotenv').config();
const client = new OAuth2Client(process.env.GOOGLE_API_KEY);
exports.register = async (req, res) => {
    //check that all required parameters were passed
if (
typeof req.body.email === 'undefined' ||
        typeof req.body.password === 'undefined' ||
typeof req.body.name === 'undefined' ||
typeof req.body.repassword === 'undefined'
) {
return res.status(422).send({ message: 'Invalid data' });
}
    //declare the variables we need
let { email, password, name, repassword } = req.body;
    //check that the password is valid
if (!validate.isValidPassWord(password) || password.trim().length < 8) {
return res.status(422).send({
message:
'Passwords with a length of 8-16 characters must contain numbers, lowercase letters and uppercase letters ',
});
}
    //check that the name is valid
if (name.trim().length < 6 || !validate.isValidName(name)) {
return res.status(422).send({ message: 'Name must be at least 6 characters long' });
}
    //basic email format check
if (email.indexOf('@') === -1 && email.indexOf('.') === -1) {
return res.status(422).send({ message: 'Invalid data' });
}
    //if password and repassword do not match
if (password != repassword) {
return res.status(422).send({ message: 'Password incorect' });
}
let userFind = null;
try {
        userFind = await user.find({ email: email }); //look up users by email
} catch (err) {
return res.status(500).send({ message: 'user not found' });
}
if (userFind.length > 0) {
        //there is already a user with this email in the db
return res.status(409).send({ message: 'Email already exist' });
}
//hash password
password = bcrypt.hashSync(password, 10);
    //create the new user
const newUser = new user({
email: email,
name: name.trim(),
        password: password,
});
try {
await newUser
            .save() //save the user
.then(function () {
                newUser.generateJWT(); //generate a token
});
} catch (err) {
return res.status(500).send({ message: err });
}
    const sendEmail = await nodemailer.sendEmail(email, newUser.token); //send an email to verify the account
if (!sendEmail) {
await newUser.remove();
return res.status(500).send({ message: 'Send email fail' });
}
res.status(201).send({ message: 'Register success' });
};
exports.verifyAccount = async (req, res) => {
    //check that all required parameters were passed
if (typeof req.params.token === 'undefined') {
return res.status(402).send({ message: '!invalid' });
}
    //declare the variables we need
let tokenFind = null;
try {
        tokenFind = await user.findOne({ token: req.params.token }); //look up the user by token
} catch (err) {
return res.status(500).send({ message: 'user not found' });
}
try {
        //save the change
await user.findByIdAndUpdate(tokenFind._id, { $set: { is_verify: true } }, err => {
err
? res.status(500).send({ message: 'verify account fail' })
: res.status(200).send({ message: 'verify account success' });
});
} catch (err) {
return res.status(500).send({ message: err });
}
};
exports.login = async (req, res) => {
    //check that all required parameters were passed
if (typeof req.body.email === 'undefined' || typeof req.body.password == 'undefined') {
return res.status(402).send({ message: 'email or password wrrong' });
}
    //declare the variables we need
const { email, password } = req.body;
let userFind = null;
try {
        userFind = await user.findOne({ email: email }); //look up the user by email
} catch (err) {
return res.status(402).send({ message: 'user not found' });
}
if (userFind === null) {
        //no such user in the db
return res.status(422).send({ message: 'not found user in database' });
}
if (!userFind.is_verify) {
        //the account has not been verified yet
return res.status(401).send({ message: 'no_registration_confirmation' });
}
if (!bcrypt.compareSync(password, userFind.password)) {
        //wrong password
return res.status(422).send({ message: 'password wrong' });
}
    userFind.generateJWT(); //generate a token
    //respond with login success
res.status(200).send({
message: 'login success',
token: userFind.token,
newUser: {
email: userFind.email,
name: userFind.name,
_id: userFind._id,
},
});
};
exports.getUser = async (req, res) => {
    //check that all required parameters were passed
if (typeof req.params.id === 'undefined') {
return res.status(402).send({ message: 'Invalid data' });
}
    //declare the variables we need
const { id } = req.params;
let email;
let userFind = null;
try {
        userFind = await user.findOne({ _id: id }); //look up the user by id
} catch (err) {
return res.send({ message: 'user not found' });
}
    if (userFind.fbEmail != null) {
        email = userFind.fbEmail;
    } else if (userFind.ggEmail != null) {
        email = userFind.ggEmail;
    } else {
        email = userFind.email;
    }
res.status(200).send({
user: {
            //return the user's email and name
email: email,
name: userFind.name,
address: userFind.address,
},
});
};
exports.requestForgotPassword = async (req, res) => {
    //check that all required parameters were passed
if (typeof req.params.email === 'undefined') {
return res.status(402).send({ message: 'Invalid data' });
}
    //declare the variables we need
const { email } = req.params;
let userFind = null;
try {
        userFind = await user.findOne({ email: email }); //look up the user by email
} catch (err) {
return res.send({ message: 'user not found' });
}
if (userFind === null) {
        //no such user in the db
return res.status(422).send({ message: 'Email not exist in database' });
}
if (!userFind.is_verify) {
        //the account has not been verified yet
return res.status(401).send({ message: 'no_registration_confirmation' });
}
    //generate an OTP code
let otp = maotp.generateOTP();
    //send the OTP to the user's email
const sendEmail = await nodemailer.sendEmailForgotPassword(email, otp);
if (!sendEmail) {
        //sending the email failed
return res.status(500).send({ message: 'Send email fail' });
}
    userFind.otp = otp; //store the new OTP
try {
userFind.save(err => {
err
? res.status(500).send({ message: 'fail' })
: res.status(201).send({ message: 'success', email: email });
        }); //save the change
} catch (err) {
return res.status(500).send({ message: err });
}
    //respond with success
};
exports.verifyForgotPassword = async (req, res) => {
    //check that all required parameters were passed
if (typeof req.body.email === 'undefined' || typeof req.body.otp === 'undefined') {
return res.status(402).send({ message: 'Invalid data' });
}
    //declare the variables we need
const { email, otp } = req.body;
let userFind = null;
try {
        userFind = await user.findOne({ email: email, otp: otp }); //look up the user by email and OTP
} catch (err) {
return res.send({ message: 'user not found' });
}
userFind
? res.status(200).send({ message: 'success', otp: otp })
: res.status(422).send({ message: 'OTP fail' });
};
exports.forgotPassword = async (req, res) => {
    //check that all required parameters were passed
if (
typeof req.body.email === 'undefined' ||
typeof req.body.otp === 'undefined' ||
typeof req.body.newPassword === 'undefined'
) {
return res.status(402).send({ message: 'Invalid data' });
}
    //declare the variables we need
const { email, otp, newPassword } = req.body;
let userFind = null;
try {
        userFind = await user.findOne({ email: email }); //look up the user by email
} catch (err) {
return res.send({ message: 'user not found' });
}
if (userFind === null) {
        //no such user in the db
return res.status(422).send({ message: 'User not found in database' });
}
    //the submitted OTP does not match the one in the db
if (userFind.otp != otp) {
return res.status(422).send({ message: 'OTP fail' });
}
// if (!validate.isValidPassWord(newPassword) || newPassword.trim().length < 8) {
// return res.status(422).send({
// message:
// 'Passwords with a length of 8-16 characters must contain numbers, lowercase letters and uppercase letters ',
// });
// }
//hash password
userFind.password = bcrypt.hashSync(newPassword, 10);
try {
await userFind.save(err => {
err
? res.status(500).send({ message: 'fail' })
: res.status(201).send({ message: 'success' });
        }); //save the change
} catch (err) {
return res.status(500).send({ message: err });
}
};
exports.updateInfor = async (req, res) => {
    //check that all required parameters were passed
if (
typeof req.body.name === 'undefined' ||
typeof req.body.id === 'undefined' ||
typeof req.body.email === 'undefined'
) {
return res.status(422).send({ message: 'Invalid data' });
}
    //declare the variables we need
const { email, name, id } = req.body;
// if (name.trim().length < 6 || !validate.isValidName(name)) {
// return res.status(422).send({ message: 'Name must be at least 6 characters long' });
// }
let newUser = await user.findById(id);
    //look up the user by email
let userFind = await user.findOne({ email: email });
    //the email already belongs to another account
if (userFind != null && newUser.email !== email) {
return res.status(422).send({ message: 'Email already exist' });
}
    //apply the changes
newUser.name = name;
newUser.email = email;
try {
        await newUser.save(); //save the changes
} catch (err) {
return res.status(500).send({ message: 'Invalid email' });
}
    //respond that the profile update succeeded
res.status(200).send({
message: 'success',
token: newUser.token,
newUser: {
email: newUser.email,
name: newUser.name,
_id: newUser._id,
},
});
};
exports.updatePassword = async (req, res) => {
    //check that all required parameters were passed
if (
typeof req.body.oldpassword === 'undefined' ||
typeof req.body.newpassword === 'undefined' ||
typeof req.body.id === 'undefined'
) {
return res.status(422).send({ message: 'Invalid data' });
}
    //declare the variables we need
let { id, oldpassword, newpassword } = req.body;
if (oldpassword === newpassword) {
return res
.status(422)
.send({ message: 'The new password must not be the same as the old password' });
}
let userFind = null;
try {
userFind = await user.findOne({ _id: id }); //tìm kiếm user theo id
} catch (err) {
return res.send({ message: 'user not found' });
}
if (!userFind) {
        //no such user in the db
return res.status(422).send({ message: 'User not found' });
}
    //the old password does not match
if (!bcrypt.compareSync(oldpassword, userFind.password)) {
return res.status(422).send({ message: 'Password wrong' });
}
if (!validate.isValidPassWord(newpassword) || newpassword.trim().length < 8) {
return res.status(422).send({
message:
'Passwords with a length of 8-16 characters must contain numbers, lowercase letters and uppercase letters ',
});
}
//hash newpassword
userFind.password = bcrypt.hashSync(newpassword, 10);
try {
userFind.save(err => {
err
? res.status(500).send({ message: 'Update password fail' })
: res.status(200).send({ message: 'Update password success' });
        }); //save the changes
} catch (err) {
return res.status(500).send({ message: err });
}
};
exports.getDataByID = async id_user => {
    let userFind = await user.findOne({ _id: id_user });
    let email; // declare locally instead of leaking an implicit global
if (userFind.fbEmail != null) {
email = userFind.fbEmail;
} else if (userFind.ggEmail != null) {
email = userFind.ggEmail;
} else {
email = userFind.email;
}
return [userFind.name, email];
};
exports.googleController = async (req, res) => {
const { idToken } = req.body;
// console.log(idToken);
client.verifyIdToken({ idToken, audience: process.env.GOOGLE_API_KEY }).then(response => {
// console.log('GOOGLE LOGIN RESPONSE',response)
const { email_verified, name, email } = response.payload;
if (email_verified) {
user.findOne({ ggEmail: email }).exec((err, newUser) => {
if (newUser) {
//newUser.generateJWT();
const token = jwt.sign({ _id: newUser._id }, process.env.JWT_KEY, {
expiresIn: '3h',
});
newUser.token = token;
newUser.save();
//const token = newUser.token;
const { _id, email, name } = newUser;
return res.json({
token,
newUser: { _id, email, name },
});
} else {
let password = email + process.env.JWT_KEY;
newUser = new user({
name: name,
ggEmail: email,
                        password: password,
is_verify: true,
});
newUser.save((err, data) => {
if (err) {
console.log('ERROR GOOGLE LOGIN ON USER SAVE', err);
return res.status(400).json({
error: 'User signup failed with google',
});
}
const token = jwt.sign({ _id: data._id }, process.env.JWT_KEY, {
expiresIn: '3h',
});
data.token = token;
data.save();
const { _id, email, name, role } = data;
return res.json({
token,
newUser: { _id, email, name, role },
});
});
}
});
} else {
return res.status(400).json({
error: 'Google login failed. Try again',
});
}
});
};
exports.facebookController = (req, res) => {
const { userID, accessToken } = req.body;
const url = `https://graph.facebook.com/v2.11/${userID}/?fields=id,name,email&access_token=${accessToken}`;
return (
fetch(url, {
method: 'GET',
})
.then(response => response.json())
// .then(response => console.log(response))
.then(response => {
const { email, name } = response;
user.findOne({ fbEmail: email }).exec((err, newUser) => {
if (newUser) {
//newUser.generateJWT();
const token = jwt.sign({ _id: newUser._id }, process.env.JWT_KEY, {
expiresIn: '3h',
});
newUser.token = token;
newUser.save();
const { _id, email, name } = newUser;
return res.json({
token,
newUser: { _id, email, name },
});
} else {
let password = email + process.env.JWT_KEY;
newUser = new user({
name: name,
fbEmail: email,
                            password: password,
is_verify: true,
});
newUser.save((err, data) => {
if (err) {
console.log('ERROR FACEBOOK LOGIN ON USER SAVE', err);
return res.status(400).json({
error: 'User signup failed with facebook',
});
}
const token = jwt.sign({ _id: data._id }, process.env.JWT_KEY, {
expiresIn: '3h',
});
data.token = token;
data.save();
const { _id, email, name } = data;
return res.json({
token,
newUser: { _id, email, name },
});
});
}
});
})
.catch(error => {
res.json({
error: 'Facebook login failed. Try later',
});
})
);
};
exports.addAddress = async (req, res) => {
try {
const userF = await user.findById(req.body.id);
if (!userF) return res.status(400).json({ message: 'User not found' });
if (req.body.address.name.length === 0 || req.body.address.name.length >= 30)
return res.status(400).json({ msg: 'Form is not format' });
if (req.body.address.phone.length !== 10)
return res.status(400).json({ msg: 'Please enter the correct phone number' });
await user.findOneAndUpdate(
{ _id: req.body.id },
{
$set: {
address: req.body.address,
},
},
);
return res.status(200).json('Add address');
} catch (err) {
return res.status(500).json({ msg: error.message });
}
};
exports.updateAddress = async (req, res) => {
try {
const userF = await user.findById(req.body.id);
if (!user) return res.status(400).json({ message: 'User not found' });
if (req.body.address.name.length === 0 || req.body.address.name.length >= 30)
return res.status(400).json({ msg: 'Form is not format' });
if (req.body.address.phone.length !== 10)
return res.status(400).json({ msg: 'Please enter the correct phone number' });
await user.findOneAndUpdate(
{ _id: req.body.id },
{
$set: {
address: req.body.address,
},
},
);
return res.status(200).json('Update address');
} catch (err) {
return res.status(500).json({ msg: err.message });
}
};
<file_sep>/api/models/brand.model.js
const mongoose = require('mongoose');
const Schema = mongoose.Schema;
const brand = new Schema(
{
name: {
type: String,
required: [true, 'Không được bỏ trống'],
},
status: {
type: Boolean,
default: true,
},
},
{ timestamps: true },
);
module.exports = mongoose.model('brand', brand);
<file_sep>/api/routers/size.router.js
const size_controller = require('../controllers/size.controller');
module.exports = app => {
app.route('/size/getsizes').get(size_controller.getSizes);
};
<file_sep>/api/config/connectDB.js
const mongoose = require('mongoose');
require('dotenv').config();
const connectDB = async () => {
await mongoose
.connect(process.env.Mongo_URL, {
useNewUrlParser: true,
useCreateIndex: true,
useFindAndModify: false,
useUnifiedTopology: true,
})
.catch(error => console.log(error.reason));
};
module.exports = connectDB;
<file_sep>/api/config/multer.js
const multer = require('multer');
const path = require('path');
module.exports = multer({
storage: multer.diskStorage({
destination: './files/',
filename: (req, file, cb) => {
let filename = `${Date.now()}-${file.originalname}`;
cb(null, filename);
},
}),
// limits: (file, cb) => {
// let sizeFile = file.fileSize
// if(sizeFile > 400000){
// return cb(new Error("Limited is 2MB"));
// }
// cb(null, true)
// },
fileFilter: (req, file, cb) => {
let ext = path.extname(file.originalname);
if (ext !== '.jpg' && ext !== '.jpeg' && ext !== '.png' && ext !== '.jfif') {
cb(new Error('File type is not supported'));
return;
}
cb(null, true);
},
});
<file_sep>/api/routers/brand.router.js
const brand_controller = require('../controllers/brand.controller');
module.exports = (app) => {
app.route('/brands').get(brand_controller.getBrands);
app.route('/brand/name/:id').get(brand_controller.getNameByID);
};
<file_sep>/api/controllers/stock.controller.js
'use strict';
const stock = require('../models/stock.model');
exports.getDataByID = async id_stock => {
let result = null;
try {
result = await stock.findById(id_stock);
} catch (err) {
console.log(err);
return;
}
if (result === null) {
        console.log('stock not found');
return;
}
return [result.name_category, result.name_brand];
};
<file_sep>/api/models/banner.model.js
const mongoose = require('mongoose');
const Schema = mongoose.Schema;
const banner = new Schema({
content: {
type: String,
        required: [true, 'không được để trống'],
},
categoryName: String,
id_category: String,
disCount: Number,
status: Boolean,
});
module.exports = mongoose.model('banner', banner);
<file_sep>/api/controllers/category.controller.js
const category = require('../models/category.model');
exports.getCategorys = async (req, res) => {
category.find({ status: true }, (err, docs) => {
err
? res.status(500).json({ message: 'category not found' })
: res.status(200).json({ data: docs });
});
};
exports.getNameByID = async (req, res) => {
if (req.params.id === 'undefined') {
return res.status(422).send({ message: 'Invalid data' });
}
let result;
try {
result = await category.findById(req.params.id);
} catch (err) {
return res.status(404).send({ message: 'category not found catch' });
}
result
? res.status(200).send({ name: result.name })
: res.status(404).send({ message: 'category not found' });
};
exports.getIDBySearchText = async (searchText, res) => {
let arr = [];
try {
arr = await category.findOne({ name: searchText }); //, 'i',{name: 0}
// arr = await category.find({ name: new RegExp(searchText, 'i') }); //, 'i',{name: 0}
} catch (err) {
res.status(500).send({ message: err });
return;
}
return arr._id;
};
<file_sep>/api/models/category.model.js
const mongoose = require('mongoose');
const Schema = mongoose.Schema;
const category = new Schema(
{
name: {
type: String,
required: [true, 'Không được bỏ trống'],
},
status: {
type: Boolean,
default: true,
},
},
{ timestamps: true },
);
module.exports = mongoose.model('category', category);
<file_sep>/api/routers/banner.router.js
const banner_controller = require('../controllers/banner.controller');
module.exports = app => {
app.route('/banners').get(banner_controller.getBanners);
};
<file_sep>/api/utils/quantity.js
const product = require('../models/product.model');
exports.changeQuantity = async (id_product, quantity) => {
let productFind = await product.findOne({ _id: id_product });
productFind.quantity -= quantity;
productFind.save();
};
exports.calPrice = (price, quantity) => {
return price * quantity;
};
exports.valid = (productCart, id_product, size, color) => {
if (productCart.id == id_product && productCart.color == color && productCart.size == size) {
return true;
}
return false;
};
<file_sep>/api/routers/category.router.js
const category_controller = require('../controllers/category.controller');
module.exports = app => {
app.route('/categorys').get(category_controller.getCategorys);
app.route('/category/name/:id').get(category_controller.getNameByID);
};
<file_sep>/index.js
// Load the required libraries
require('dotenv').config();
const express = require('express');
const app = express();
const port = process.env.PORT || 8080;
const path = require('path');
const bodyParser = require('body-parser');
const mongoose = require('mongoose');
const cors = require('cors');
const connectDB = require('./api/config/connectDB');
const userRouter = require('./api/routers/user.router');
const categoryRouter = require('./api/routers/category.router');
const productRouter = require('./api/routers/product.router');
const brandRouter = require('./api/routers/brand.router');
const cartRouter = require('./api/routers/cart.router');
const orderRouter = require('./api/routers/order.router');
const adminRouter = require('./api/routers/admin.router');
const commentRouter = require('./api/routers/comment.router');
const bannerRouter = require('./api/routers/banner.router');
const colorRouter = require('./api/routers/color.router');
const sizeRouter = require('./api/routers/size.router');
const promoRouter = require('./api/routers/promocode.router');
//model comment
const Comments = require('./api/models/comment.model');
// Connect to the MongoDB database
connectDB();
// Allow the app to parse form data
app.use(express.json());
app.use(express.urlencoded({ extended: true }));
//cors
app.use(cors());
const http = require('http').createServer(app);
const io = require('socket.io')(http, {
cors: {
origin: '*'
}
});
// Socket.io
let users = [];
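// Each client joins a socket.io room keyed by the product it is viewing, so new comments
// and replies are only broadcast to sockets watching that product.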
// io.on('connection', socket => {
// console.log(socket.id + 'connected.');
// socket.on('disconnect', () => {
// console.log(socket.id + 'disconnected');
// });
// });
io.on('connection', socket => {
// console.log(socket.id + 'connected');
socket.on('joinRoom', id => {
const user = { userId: socket.id, room: id };
const check = users.every(user => user.userId !== socket.id);
if (check) {
users.push(user);
socket.join(user.room);
} else {
users.map(user => {
if (user.userId === socket.id) {
if (user.room !== id) {
socket.leave(user.room);
socket.join(id);
user.room = id;
}
}
});
}
});
socket.on('createComment', async msg => {
const { username, content, product_id, createdAt, rating, send } = msg;
// console.log('create comment');
const newComment = new Comments({
username,
content,
product_id,
createdAt,
rating,
});
// // console.log(newComment);
// await newComment.save();
if (send === 'replyComment') {
const { _id, username, content, product_id, createdAt, rating } = newComment;
const comment = await Comments.findById(product_id);
if (comment) {
comment.reply.push({ _id, username, content, createdAt, rating });
await comment.save();
io.to(comment.product_id).emit('sendReplyCommentToClient', comment);
}
} else {
await newComment.save();
io.to(newComment.product_id).emit('sendCommentToClient', newComment);
}
});
socket.on('disconnect', () => {
// console.log(socket.id + 'disconnect');
users = users.filter(user => user.userId !== socket.id);
});
});
userRouter(app);
categoryRouter(app);
brandRouter(app);
productRouter(app);
cartRouter(app);
orderRouter(app);
adminRouter(app);
commentRouter(app);
bannerRouter(app);
colorRouter(app);
sizeRouter(app);
promoRouter(app);
app.get('/', (req, res) => {
res.send('welcome to e_store');
});
http.listen(port, () => console.log('server running on port ' + port));
<file_sep>/api/routers/product.router.js
const product_controller = require('../controllers/product.controller');
module.exports = app => {
app.route('/product/getproducts/:page').get(product_controller.getProducts);
app.route('/products').get(product_controller.getAllProduct);
app.route('/product/:id').get(product_controller.getOne);
app.route('/updateRate/:id').patch(product_controller.reView);
app.route('/product/search/s').get(product_controller.searchProduct);
app.route('/product/brand/:brand').get(product_controller.getProductByBrand);
app.route('/product/category').put(product_controller.getProductByCategory);
app.route('/product/amount/:id').get(product_controller.getNameByID);
app.route('/product/sort/:inc').get(product_controller.sortProduct);
app.route('/product/banchay/top10').get(product_controller.getProductTop10);
app.route('/product/category/cate').get(product_controller.getProductCategory);
};
<file_sep>/api/routers/color.router.js
const color_controller = require('../controllers/color.controller');
module.exports = app => {
app.route('/color/getcolors').get(color_controller.getColors);
};
<file_sep>/api/controllers/comment.controller.js
const comment = require('../models/comment.model');
class APIfeatures {
constructor(query, queryString) {
this.query = query;
this.queryString = queryString;
}
sorting() {
this.query = this.query.sort('-createdAt');
return this;
}
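    // paginate using the `page` and `limit` query parameters (defaults: page 1, 5 per page)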
paginating() {
const page = this.queryString.page * 1 || 1;
const limit = this.queryString.limit * 1 || 5;
const skip = (page - 1) * limit;
this.query = this.query.skip(skip).limit(limit);
return this;
}
}
exports.getComment = async (req, res) => {
try {
const features = new APIfeatures(comment.find({ product_id: req.params.id }), req.query)
.sorting()
.paginating();
const comments = await features.query;
res.json({
status: 'success',
result: comments.length,
comments,
});
} catch (err) {
return res.status(500).json({ msg: err.message });
}
};
exports.updateComment = async (req, res) => {
if (typeof req.body.id === 'undefined' || typeof req.body.content === 'undefined') {
return res.status(422).send({ message: 'Invalid data' });
}
const { id, content } = req.body;
comment
.updateOne(
{ _id: id },
{
$set: {
content: content,
},
},
)
.exec(err => {
if (err) {
            return res.status(400).send({ error: err });
}
res.status(201).send({ message: 'success' });
});
};
exports.deleteComment = async (req, res) => {
    // check that the required parameters were provided
if (typeof req.body.id === 'undefined' || typeof req.body.user_id === 'undefined') {
return res.status(422).send({ message: 'Invalid data' });
}
    // look up the comment by id and user_id in the comment model
    const { id, user_id } = req.body;
    const commentFind = await comment.find({ id: id, user_id: user_id });
    if (commentFind.length === 0)
        return res.status(404).send({ message: 'You cannot delete other people\'s comments' });
    // soft-delete the comment by updating its status
comment
.updateOne(
{ _id: req.params.id },
{
$set: {
status: false,
},
},
)
.exec(err => {
if (err) {
            return res.status(400).send({ error: err });
}
res.status(201).send({ message: 'delete success' });
});
};
<file_sep>/api/models/cart.model.js
const mongoose = require('mongoose');
const product = require('../models/product.model');
const Schema = mongoose.Schema;
const cart = new Schema({
id_user: {
type: String,
},
products: {
type: [
{
name: String,
price: Number,
img: String,
quantity: Number,
id: String,
color: { type: Schema.Types.ObjectId, ref: 'color' },
size: { type: Schema.Types.ObjectId, ref: 'size' },
},
],
required: true,
minlength: 1,
},
grandTotal: {
type: Number,
},
status: {
type: Boolean,
default: true,
},
date_cart: {
type: Date,
        default: Date.now,
},
});
cart.methods.updateCountProduct = async function () {
const cart = this;
let index;
for (let i = 0; i < cart.products.length; i++) {
index = cart.products.findIndex(element => cart.products[i]._id === element._id);
if (index !== -1) {
cart.grandTotal = cart.products[index].price * cart.products[index].quantity;
}
}
};
cart.methods.minusQuantity = async function (id_size, res) {
const cart = this;
};
cart.methods.plusProduct = async function (id_product) {
let productFind = await product.findById(id_product);
productFind.quantity += 1;
try {
await productFind.save();
} catch (err) {
console.log(err);
return;
}
};
module.exports = mongoose.model('cart', cart);
<file_sep>/api/utils/orderby.js
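// Returns true when step `type` (1-4) is the latest completed entry in orderFind.orderStatus
// and no later step has been completed yet.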
exports.ordered = (type, orderFind) => {
if (type === 1) {
if (
orderFind.orderStatus[0].isCompleted &&
!orderFind.orderStatus[1].isCompleted &&
!orderFind.orderStatus[2].isCompleted &&
!orderFind.orderStatus[3].isCompleted
)
return true;
}
if (type === 2) {
if (
orderFind.orderStatus[1].isCompleted &&
!orderFind.orderStatus[2].isCompleted &&
!orderFind.orderStatus[3].isCompleted
)
return true;
}
if (type === 3) {
if (orderFind.orderStatus[2].isCompleted && !orderFind.orderStatus[3].isCompleted)
return true;
}
if (type === 4) {
if (orderFind.orderStatus[3].isCompleted) return true;
}
return false;
};
<file_sep>/api/controllers/brand.controller.js
const brand = require('../models/brand.model');
exports.getBrands = async (req, res) => {
    // get all brands with status = true
brand.find({ status: true }, (err, docs) => {
err
? res.status(500).json({ message: 'brand not found' })
: res.status(200).json({ data: docs });
});
};
exports.getNameByID = async (req, res) => {
    // check that an id was provided
if (req.params.id === 'undefined') {
return res.status(422).send({ message: 'Invalid data' });
}
    // declare the result variable
let result = null;
try {
        result = await brand.findOne({ _id: req.params.id }); // find the brand by id
} catch (err) {
return res.status(404).send({ message: 'brand not found' });
}
result
? res.status(200).send({ name: result.name })
: res.status(404).send({ message: 'not found' });
};
exports.getIDBySearchText = async searchText => {
    // declare the result array
let arr = [];
try {
        arr = await brand.find({ name: new RegExp(searchText, 'i') }); // case-insensitive search by name
} catch (err) {
res.status(500).send({ message: err });
return;
}
    return arr.map(i => i.id); // return the matching brand ids
};
<file_sep>/api/controllers/color.controller.js
const color = require('../models/color.model');
// Get all color -- find(query, projection)
module.exports.getColors = (req, res) => {
color.find({ status: true }, (err, colors) => {
err
? res.status(500).json({ message: 'colors not found' })
: res.status(200).json({ data: colors });
});
};
<file_sep>/api/controllers/admin.controller.js
// Load the required models and libraries
const category = require('../models/category.model');
const brand = require('../models/brand.model');
const user = require('../models/user.model');
const stock = require('../models/stock.model');
const size = require('../models/size.model');
const color = require('../models/color.model');
const promocode = require('../models/promocode.model');
const size_product = require('../models/size_product');
const color_product = require('../models/color_product');
const banner = require('../models/banner.model');
const product = require('../models/product.model');
const categoryCtrl = require('./category.controller');
const image_product = require('../models/image_product');
const stockController = require('../controllers/stock.controller');
const bcrypt = require('bcrypt');
require('dotenv').config();
const cloudinary = require('../config/cloudinary');
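// Helper: upload a local file to Cloudinary and return its secure URL (false on failure)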
const uploadImg = async path => {
let res;
try {
        res = await cloudinary.uploader.upload(path); // upload the image to Cloudinary
} catch (err) {
return false;
}
return res.secure_url;
};
exports.addProduct = async (req, res) => {
    // check that all required parameters were provided
if (
typeof req.files === 'undefined' ||
typeof req.body.name === 'undefined' ||
typeof req.body.id_category === 'undefined' ||
typeof req.body.price === 'undefined' ||
typeof req.body.id_brand === 'undefined' ||
typeof req.body.description === 'undefined'
) {
return res.status(422).send({ message: 'Invalid data' });
}
req.body.sizeProduct = JSON.parse(req.body.sizeProduct);
req.body.colorProduct = JSON.parse(req.body.colorProduct);
const { name, id_category, id_brand, quantity, price, description, sizeProduct, colorProduct } =
req.body;
let urls = [];
let id_product;
const files = req.files;
for (const file of files) {
const { path } = file;
const result = await uploadImg(path);
urls.push(result);
}
const nColor = new color_product({ colorProduct });
const nSize = new size_product({ sizeProduct });
const nProduct = new product({
name,
id_category,
id_brand,
images: urls,
description,
price,
quantity,
});
const sP = await nProduct.save((err, docProduct) => {
if (docProduct) id_product = docProduct._id;
});
const sS = await nSize.save(async (err, doc) => {
if (doc) {
doc.products = id_product;
await doc.save();
product
.updateOne(
{ _id: id_product },
{
$set: {
sizeProducts: doc._id,
},
},
)
.then(data => {
if (data) {
console.log('size ngoai');
}
});
}
});
const sC = await nColor.save(async (err, doc) => {
if (doc) {
doc.products = id_product;
await doc.save();
product
.updateOne(
{ _id: id_product },
{
$set: {
colorProducts: doc._id,
},
},
)
.then(data => {
if (data) {
console.log('color ngoai');
}
});
}
});
res.status(201).send({ message: 'add product success' });
};
exports.updateProduct = async (req, res) => {
    // check that all required parameters were provided
if (
typeof req.body.name === 'undefined' ||
typeof req.body.id === 'undefined' ||
typeof req.body.id_category === 'undefined' ||
typeof req.body.price === 'undefined' ||
typeof req.body.id_brand === 'undefined' ||
typeof req.body.description === 'undefined'
) {
return res.status(422).send({ message: 'Invalid data' });
}
let {
id,
name,
id_category,
id_brand,
price,
description,
quantity,
status,
sizeProduct,
colorProduct,
} = req.body;
    const productFind = await product.findOne({ _id: id }); // find the product by id
const id_colorP = productFind.colorProducts;
const id_sizeP = productFind.sizeProducts;
let urls = [];
if (req.files.length > 0) {
const files = req.files;
for (const file of files) {
const { path } = file;
const result = await uploadImg(path);
urls.push(result);
}
} else {
urls = productFind.images;
}
if (colorProduct != 'null') {
colorProduct = JSON.parse(colorProduct);
color_product
.updateOne(
{ _id: id_colorP },
{
$set: {
colorProduct: colorProduct,
},
},
)
.then(err => {
// if (err) console.log('color product');
});
}
if (sizeProduct != 'null') {
sizeProduct = JSON.parse(sizeProduct);
size_product
.updateOne(
{ _id: id_sizeP },
{
$set: {
sizeProduct: sizeProduct,
},
},
)
.then(err => {
// if (err) console.log('size product');
});
}
product.updateOne(
{ _id: id },
{
$set: {
name,
id_category,
price,
quantity,
images: urls,
id_brand,
description,
status,
},
},
(err, data) => {
err
? res.status(500).json({ message: 'Fail' })
: res.status(200).json({ message: 'Success' });
},
);
};
exports.deleteProduct = async (req, res) => {
product.updateOne({ _id: req.params.id }, { $set: { status: false } }).exec(err => {
err
? res.status(400).send({ message: err })
: res.status(200).send('delete product success');
});
};
exports.getOne = async (req, res) => {
product
.findOne({ _id: req.params.id })
.populate('colorProducts')
.populate({
path: 'colorProducts',
populate: {
path: 'colorProduct',
populate: {
path: '_id',
},
},
})
.populate('sizeProducts')
.populate({
path: 'sizeProducts',
populate: {
path: 'sizeProduct',
populate: {
path: '_id',
},
},
})
.exec(function (err, data) {
err ? res.status(404).json({ message: err }) : res.status(200).json({ data });
});
};
exports.getAllProducts = async (req, res) => {
    // check that the page parameter was provided
if (typeof req.params.page === 'undefined') {
return res.status(402).send({ message: 'Data invalid' });
}
let count = null;
try {
        count = await product.countDocuments({}); // count how many products there are
} catch (err) {
return res.status(500).send({ message: err });
}
    const totalPage = parseInt((count - 1) / 5 + 1); // derive the number of pages from the product count
const { page } = req.params;
if (parseInt(page) < 1 || parseInt(page) > totalPage) {
return res.status(200).send({ data: [], message: 'Invalid page', totalPage });
}
product
.find({})
.populate('colorProducts')
.populate({
path: 'colorProducts',
populate: {
path: 'colorProduct',
populate: {
path: '_id',
},
},
})
.populate('sizeProducts')
.populate({
path: 'sizeProducts',
populate: {
path: 'sizeProduct',
populate: {
path: '_id',
},
},
})
.skip(5 * (parseInt(page) - 1))
        .limit(5) // limit to 5 products per page
.exec((err, docs) => {
err
? res.status(404).send({ message: err })
: res.status(200).send({ data: docs, totalPage });
});
};
//stock
exports.addStock = async (req, res) => {
    // check that all required parameters were provided
if (
typeof req.body.name_category === 'undefined' ||
typeof req.body.name_brand === 'undefined' ||
typeof req.body.quantity === 'undefined'
) {
return res.status(422).send({ message: 'Invalid data' });
}
    const { name_category, name_brand, quantity } = req.body; // destructure the required parameters
let stockFind;
try {
stockFind = await stock.find({
name_category: name_category,
name_brand: name_brand,
        }); // look up the stock by category name and brand name
} catch (err) {
return res.status(500).send({ message: err });
}
if (stockFind.length > 0) {
        // if anything was found, the stock already exists in the database
return res.status(409).send({ message: 'stock already exist' });
}
const newStock = new stock({
        // create a new stock
name_category: name_category,
name_brand: name_brand,
quantity: quantity,
status: true,
});
    const newCategory = new category({
        // create a new category
        name: name_category,
        status: true,
    });
const newBrand = new brand({
        // create a new brand
name: name_brand,
status: true,
});
try {
        // save everything to MongoDB
await newStock.save();
await newCategory.save();
await newBrand.save();
} catch (err) {
// console.log(err);
return res.status(500).send({ message: err });
}
    res.status(201).send({ message: 'add stock success' }); // report success
};
exports.updateStock = async (req, res) => {
    // check that all required parameters were provided
if (
typeof req.body.id === 'undefined' ||
typeof req.body.name_category === 'undefined' ||
typeof req.body.path === 'undefined' ||
typeof req.body.name_brand === 'undefined' ||
typeof req.body.quantity === 'undefined'
) {
return res.status(422).send({ message: 'Invalid data' });
}
    const { id, name_category, path, name_brand, quantity, status } = req.body; // destructure the parameters
    const getNameStock = await stockController.getDataByID(id); // look up the stored names by id
let stockFind = null;
let categoryFind = null;
let brandFind = null;
try {
        stockFind = await stock.findById(id); // find the stock by id
        categoryFind = await category.findOne({
            name: getNameStock[0],
            status: true,
        }); // find the category by name with status = true
        brandFind = await brand.findOne({
            name: getNameStock[1],
            status: true,
        }); // find the brand by name with status = true
} catch (err) {
return res.status(500).send({ message: err });
}
if (stockFind === null) {
return res.status(422).send({ message: 'stock not found' });
}
    // update the stock, category and brand info
stockFind.name_category = name_category;
stockFind.name_brand = name_brand;
stockFind.quantity = quantity;
stockFind.status = status;
categoryFind.name = name_category;
categoryFind.path = path;
categoryFind.status = status;
brandFind.name = name_brand;
brandFind.status = status;
try {
        // save the changes to MongoDB
await stockFind.save();
await categoryFind.save();
await brandFind.save();
} catch (err) {
// console.log(err);
return res.status(500).send({ message: err });
}
res.status(201).send({
message: 'update stock success',
stock: {
name_category: name_category,
name_brand: name_brand,
quantity: quantity,
},
});
};
exports.deleteStock = async (req, res) => {
    // check that the id parameter was provided
if (typeof req.params.id === 'undefined') {
return res.status(402).send({ message: 'data invalid' });
}
    const getNameStock = await stockController.getDataByID(req.params.id); // look up the stored names by id
let stockFind = null;
let categoryFind = null;
let brandFind = null;
try {
        stockFind = await stock.findById(req.params.id); // find the stock by id
        categoryFind = await category.findOne({
            name: getNameStock[0],
            status: true,
        }); // find the category by name with status = true
        brandFind = await brand.findOne({
            name: getNameStock[1],
            status: true,
        }); // find the brand by name with status = true
} catch (err) {
// console.log(err);
return res.status(500).send({ message: 'server found' });
}
if (brandFind === null) {
return res.status(400).send({ message: 'stock not found' });
}
    // mark everything as inactive
stockFind.status = false;
categoryFind.status = false;
brandFind.status = false;
try {
        // save the changes to MongoDB
await stockFind.save();
await categoryFind.save();
await brandFind.save();
} catch (err) {
// console.log(err);
return res.status(500).send({ message: err });
}
    res.status(200).send({ message: 'delete stock success' }); // report success
};
exports.getAllStocks = async (req, res) => {
    // check that the page parameter was provided
if (typeof req.params.page === 'undefined') {
return res.status(402).send({ message: 'Data Invalid' });
}
let count = null;
try {
        count = await stock.countDocuments({}); // count the documents for pagination
} catch (err) {
// console.log(err);
return res.status(500).send({ message: err });
}
    let totalPage = parseInt((count - 1) / 9 + 1); // compute the total number of pages
let { page } = req.params;
if (parseInt(page) < 1 || parseInt(page) > totalPage) {
return res.status(200).send({ data: [], message: 'Invalid page', totalPage });
}
stock
        .find() // fetch the stocks
.skip(9 * (parseInt(page) - 1))
.limit(9)
.exec((err, docs) => {
err
? res.status(404).send({ message: err })
: res.status(200).send({ data: docs, totalPage });
});
};
exports.getStocks = async (req, res) => {
stock.find({ status: true }, (err, docs) => {
err ? res.status(404).send({ message: err }) : res.status(200).send({ data: docs });
});
};
//brand
exports.addBrand = async (req, res) => {
    // check that all required parameters were provided
if (typeof req.body.name === 'undefined') {
return res.status(422).send({ message: 'Invalid data' });
}
let brandFind = null;
const { name } = req.body;
try {
brandFind = await brand.find({ name: name }); //tìm kiếm brand theo tên
} catch (err) {
return res.status(500).send({ message: err });
}
if (brandFind.length > 0) {
        // the brand already exists
return res.status(409).send({ message: 'Brand already exist' });
}
const newBrand = new brand({ name });
try {
await newBrand.save(); //lưu brand
} catch (err) {
// xuất ra lỗi
return res.status(500).send({ message: err });
}
res.status(201).send({ message: 'add brand success', data: newBrand }); // thông báo add brand thành công
};
exports.updateBrand = async (req, res) => {
//kiểm tra có đủ tham số truyền vào hay không
if (typeof req.body.id === 'undefined' || typeof req.body.name === 'undefined') {
return res.status(422).send({ message: 'Invalid data' });
}
const { id, name, status } = req.body; // khai báo biến
let brandFind;
try {
brandFind = await brand.findById(id); //tìm kiếm brand theo id
} catch (err) {
return res.status(500).send({ message: err }); //xuất lỗi
}
if (brandFind === null) {
        // the brand was not found, return an error
return res.status(422).send({ message: 'brand not found' });
}
    // update the brand's info
brandFind.name = name;
brandFind.status = status;
try {
//lưu lại branđ đã thay đổi
await brandFind.save(err => {
err
? res.status(404).send({ message: 'add brand fail' })
: res.status(201).send({
message: 'update brand success',
brand: { name: name },
});
});
} catch (err) {
return res.status(500).send({ message: err }); //xuất lỗi
}
};
exports.deleteBrand = async (req, res) => {
brand.updateOne({ _id: req.params.id }, { $set: { status: false } }).exec(error => {
error ? res.status(400).send({ error }) : res.status(201).send('delete brand success');
});
};
exports.getAllBrands = async (req, res) => {
//kiểm tra có truyền đủ tham số hay không
if (typeof req.params.page === 'undefined') {
return res.status(402).send({ message: 'Data invalid' });
}
let count = null; //khai báo biến
try {
count = await brand.countDocuments(); //đếm brand
} catch (err) {
//xuất lỗi
return res.status(500).send({ message: err });
}
const totalPage = parseInt((count - 1) / 5 + 1); //tính tổng số trang
const { page } = req.params;
if (parseInt(page) < 1 || parseInt(page) > totalPage) {
return res.status(200).send({ data: [], message: 'Invalid page', totalPage });
}
brand
.find() //lấy brand
.skip(5 * (parseInt(page) - 1))
.limit(5)
.exec((err, docs) => {
err
? res.status(500).send({ message: err })
: res.status(200).send({ data: docs, totalPage });
});
};
// category
exports.addCategory = async (req, res) => {
//kiểm tra có truyền đủ tham số hay không
if (typeof req.body.name === 'undefined') {
return res.status(422).send({ message: 'Invalid data' });
}
//khai báo biến
const { name } = req.body;
let categoryFind;
try {
categoryFind = await category.find({ name: name }); //tìm kiếm theo name và path
} catch (err) {
//xuất lỗi
return res.status(500).send({ message: err });
}
if (categoryFind.length > 0) {
        // the category already exists
return res.status(409).send({ message: 'category already exist' });
}
const newCategory = new category({ name });
try {
newCategory.save(err => {
err
? res.status(500).send({ message: 'add categoy fail' })
: res.status(201).send({ message: 'add category success' });
});
} catch (err) {
return res.status(500).send({ message: err });
}
};
exports.updateCategory = async (req, res) => {
//kiểm tra có truyền tham số đủ hay không
if (typeof req.body.id === 'undefined' || typeof req.body.name === 'undefined') {
return res.status(422).send({ message: 'Invalid data' });
}
//khai báo biến cần thiết
const { id, name, status } = req.body;
let categoryFind = null;
try {
categoryFind = await category.findById(id); //tiến hành tìm kiếm category theo id
} catch (err) {
return res.status(500).send({ message: err });
}
if (categoryFind === null) {
        // the category does not exist in the database
return res.status(422).send({ message: 'category not found' });
}
//tiến hành update các thông tin cho category
categoryFind.name = name;
categoryFind.status = status;
try {
//lưu các thay đổi
categoryFind.save(err => {
err
? res.status(500).send({ message: err })
: res.status(201).send({
message: 'update category success',
category: { name: name },
});
});
} catch (err) {
//xuất lỗi nếu không lưu được
return res.status(500).send({ message: err });
}
//thông báo update thành công
};
exports.deleteCategory = async (req, res) => {
category.updateOne({ _id: req.params.id }, { $set: { status: false } }).exec(error => {
error ? res.status(400).send({ error }) : res.status(200).send('delete product success');
});
};
exports.getAllCategorys = async (req, res) => {
//kiểm tra có truyền tham số đủ hay không
if (typeof req.params.page === 'undefined') {
return res.status(402).send({ message: 'Data Invalid' });
}
//khai báo biến cần thiết
let count = null;
try {
count = await category.countDocuments(); //đém category
} catch (err) {
return res.status(500).send({ message: err });
}
const totalPage = parseInt((count - 1) / 5 + 1); //tính số trang
const { page } = req.params;
if (parseInt(page) < 1 || parseInt(page) > totalPage) {
return res.status(200).send({ data: [], message: 'Invalid page', totalPage });
}
category
.find() //get category them status = true
.skip(5 * (parseInt(page) - 1))
.limit(5)
.exec((err, docs) => {
err
? res.status(500).send({ message: err })
: res.status(200).send({ data: docs, totalPage });
});
};
// user
exports.updateUser = async (req, res) => {
//kiểm tra có truyền tham số đủ hay không
if (typeof req.body.email === 'undefined' || typeof req.body.name === 'undefined') {
return res.status(422).send({ message: 'Invalid data' });
}
//khai báo biến cần thiết
const { email, name, status } = req.body;
let userFind;
try {
userFind = await user.findOne({ email: email, is_admin: true }); //tiến hành tìm kiếm user theo email
} catch (err) {
return res.status(500).send({ message: err });
}
if (userFind === null) {
        // the user does not exist in the database
return res.status(422).send({ message: 'user not found' });
}
//update thông tin cho user
userFind.name = name;
userFind.status = status;
try {
await userFind.save(); //lưu lại các thay đổi
} catch (err) {
//xuất lỗi nếu không lưu lại được
return res.status(500).send({ message: err });
}
//thông báo update thành công
res.status(200).send({
message: 'update user success',
user: {
email: userFind.email,
name: userFind.name,
},
});
};
exports.deleteUser = async (req, res) => {
user.updateOne({ _id: req.params.id }, { $set: { status: false } }).exec(error => {
error ? res.status(400).send({ error }) : res.status(201).send('delete product success');
});
};
exports.addUser = async (req, res) => {
    // check that all required parameters were provided
if (
typeof req.body.email === 'undefined' ||
        typeof req.body.password === 'undefined' ||
typeof req.body.name === 'undefined' ||
typeof req.body.is_admin === 'undefined'
) {
return res.status(422).send({ message: 'Invalid data' });
}
    // declare the required variables
let { email, password, name, is_admin } = req.body;
let userFind = null;
    // validate the email format and require a password of at least 6 characters
if ((email.indexOf('@') === -1 && email.indexOf('.') === -1) || password.length < 6) {
res.status(422).send({ message: 'Invalid data or password too short' });
}
try {
        userFind = await user.find({ email: email }); // look up the user by email
} catch (err) {
return res.status(500).send({ message: err });
}
if (userFind.length > 0) {
        // the email is already registered
return res.status(409).send({ message: 'Email already exist' });
}
    password = bcrypt.hashSync(password, 10); // hash the password
const newUser = new user({
        // create a new user to add to the database
email: email,
name: name,
is_verify: true,
        password: password,
is_admin: is_admin,
});
try {
await newUser.save().then(function () {
            newUser.generateJWT(); // generate a token
        }); // save the user to the database
} catch (err) {
        // return an error if the save failed
return res.status(500).send({ message: err });
}
    res.status(201).send({ message: 'add user success' }); // report success
};
exports.login = async (req, res) => {
    // check that all required parameters were provided
if (typeof req.body.email === 'undefined' || typeof req.body.password == 'undefined') {
return res.status(402).send({ message: 'Invalid data' });
}
    // declare the required variables
const { email, password } = req.body;
let userFind = null;
try {
        userFind = await user.findOne({ email: email, is_admin: true }); // look up the admin user by email
} catch (err) {
return res.send({ message: err });
}
if (userFind == null) {
        // the user does not exist in the database
return res.status(422).send({ message: 'user not found' });
}
if (!userFind.is_verify) {
        // the account has not been verified yet
return res.status(401).send({ message: 'no_registration_confirmation' });
}
if (!bcrypt.compareSync(password, userFind.password)) {
        // wrong password
return res.status(422).send({ message: 'wrong password' });
}
    // generate a token for the logged-in user
userFind.generateJWT();
    // report a successful login
res.status(200).send({
message: 'success',
token: userFind.token,
user: {
email: userFind.email,
name: userFind.name,
id: userFind._id,
},
});
};
exports.getUsers = async (req, res) => {
    // get all active users
user.find({ status: true }, (err, docs) => {
err ? res.status(500).send({ message: err }) : res.status(200).send({ data: docs });
});
};
exports.getAllUsers = async (req, res) => {
//kiểm tra có truyền tham số đủ hay không
if (typeof req.params.page === 'undefined') {
return res.status(402).send({ message: 'Data invalid' });
}
//khai báo biến cần thiết
let count = null;
try {
count = await user.countDocuments({ is_admin: true }); //đếm admin
} catch (err) {
// console.log(err);
return res.status(500).send({ message: err });
}
const totalPage = parseInt((count - 1) / 9 + 1); //tính số trang
const { page } = req.params;
if (parseInt(page) < 1 || parseInt(page) > totalPage) {
return res.status(200).send({ data: [], message: 'Invalid page', totalPage });
}
//get user
user.find({ is_admin: true })
.skip(9 * (parseInt(page) - 1))
.limit(9)
.exec((err, docs) => {
err
? res.status(500).send({ message: err })
: res.status(200).send({ data: docs, totalPage });
});
};
// Add new size
module.exports.addSize = (req, res) => {
const nSize = new size(req.body);
nSize.save((err, doc) => {
err
? res.status(500).json({ message: 'add size fail' })
: res.status(200).json({
message: 'add size success',
size: doc,
});
});
};
// Get all size -- find(query, projection)
module.exports.getAllSizes = async (req, res) => {
//kiểm tra có truyền tham số đủ hay không
if (typeof req.params.page === 'undefined') {
return res.status(402).send({ message: 'Data invalid' });
}
//khai báo biến cần thiết
let count = null;
try {
count = await size.countDocuments({}); //đếm color
} catch (err) {
return res.status(500).send({ message: err });
}
if (count === null) {
return res.status(500).json({ message: 'sizes not found' });
}
const totalPage = parseInt((count - 1) / 5 + 1); //tính số trang
const { page } = req.params;
if (parseInt(page) < 1 || parseInt(page) > totalPage) {
return res.status(200).send({ data: [], message: 'Invalid page', totalPage });
}
//get colors
size.find({})
.skip(5 * (parseInt(page) - 1))
.limit(5)
.exec((err, docs) => {
err
? res.status(500).send({ message: err })
: res.status(200).send({ data: docs, totalPage });
});
};
// Update size by ID
module.exports.updateSize = (req, res) => {
const { id, name, description, status } = req.body;
size.updateOne({ _id: id }, { $set: { name, description, status } }, (err, data) => {
err
? res.status(500).send({ message: err })
: res.status(200).send({ message: 'update size success' });
});
};
//Delete size
module.exports.deleteSize = (req, res) => {
size.updateOne({ _id: req.params.id }, { status: false }, (err, data) => {
err
? res.status(500).send({ message: err })
: res.status(200).send({ message: 'delete size success' });
});
};
// Add new color
module.exports.addColor = (req, res) => {
const nColor = new color(req.body);
nColor.save((err, doc) => {
err
? res.status(500).json({ message: err })
: res.status(200).json({
color: doc,
});
});
};
// Get all color -- find(query, projection)
module.exports.getAllColors = async (req, res) => {
//kiểm tra có truyền tham số đủ hay không
if (typeof req.params.page === 'undefined') {
return res.status(402).send({ message: 'Data invalid' });
}
//khai báo biến cần thiết
let count = null;
try {
count = await color.countDocuments({}); //đếm color
} catch (err) {
return res.status(500).send({ message: err });
}
if (count === null) return res.status(500).json({ message: 'colors not found' });
const totalPage = parseInt((count - 1) / 5 + 1); //tính số trang
const { page } = req.params;
if (parseInt(page) < 1 || parseInt(page) > totalPage) {
return res.status(200).send({ data: [], message: 'Invalid page', totalPage });
}
//get colors
color
.find({})
.skip(5 * (parseInt(page) - 1))
.limit(5)
.exec((err, docs) => {
err
? res.status(500).send({ message: err })
: res.status(200).send({ data: docs, totalPage });
});
};
// Update color
module.exports.updateColor = (req, res) => {
const { id, name, description, status } = req.body;
color.updateOne({ _id: id }, { $set: { name, description, status } }, (err, data) => {
err
? res.status(500).send({ message: err })
: res.status(200).send({ message: 'update color success' });
});
};
// Delete color
module.exports.deleteColor = (req, res) => {
color.updateOne({ _id: req.params.id }, { status: false }, (err, data) => {
err
? res.status(500).send({ message: err })
: res.status(200).send({ message: 'delete color success' });
});
};
//add promotion code
exports.addPromotionCode = async (req, res) => {
const exits = await promocode.findOne({ promotion_code: req.body.promotion_code });
if (exits) {
return res.json({ message: 'promotion code exits' });
}
const newPro = new promocode(req.body);
newPro.save((err, doc) => {
err ? res.status(500).json({ message: err }) : res.status(200).json({ doc });
});
};
//update promotion code
exports.updatePromoCode = async (req, res) => {
promocode.updateOne(
{ _id: req.body.id },
{
$set: req.body,
},
(err, data) => {
err
? res.status(500).send({ message: err })
: res.status(200).send({ message: 'update promotion code success' });
},
);
};
exports.getPromoCodes = async (req, res) => {
promocode.find({}, (err, data) => {
err ? res.status(500).send({ message: err }) : res.status(200).json(data);
});
};
//add banner
exports.addBanner = async (req, res) => {
let productFind;
try {
productFind = await product.find({
id_category: req.body.id_category,
});
} catch (err) {
return res.status(500).send({ message: err });
}
for (let pro of productFind) {
product
.updateOne(
{ _id: pro._id },
{
$set: {
sellPrice: pro.price - (pro.price * req.body.disCount) / 100,
},
},
{ upsert: true },
)
.then(err => {
// if(err) console.log('');
});
}
const newBanner = new banner(req.body);
newBanner.save((err, doc) => {
err
? res.status(500).send({ message: err })
: res.status(201).send({ message: 'add banner success' });
});
};
//update banner
exports.updateBanner = async (req, res) => {
let productFind;
try {
productFind = await product.find({ id_category: req.body.id_category });
} catch (err) {
return res.status(500).send({ message: err });
}
for (let pro of productFind) {
product
.updateOne(
{ _id: pro._id },
{
$set: {
sellPrice: pro.price - (pro.price * req.body.disCount) / 100,
},
},
{ upsert: true },
)
.then(err => {
// if(err) console.log('');
});
}
banner.updateOne({ _id: req.body.id }, { $set: req.body }, (err, data) => {
err
? res.status(500).send({ message: 'fail' })
: res.status(200).send({ message: 'Update success' });
});
};
//update status banner
exports.updateStatus = async (req, res) => {
banner.updateOne({ _id: req.params.id }, { status: false }, err => {
err
? res.status(500).send({ message: 'fail' })
: res.status(200).send({ message: 'Success' });
});
};
// get a banner by id
exports.getBanner = async (req, res) => {
banner.findOne({ _id: req.params.id }, (err, data) => {
err ? res.status(404).json({ message: 'Banner not found' }) : res.status(200).json(data);
});
};
//get all banner
exports.getBanners = async (req, res) => {
banner.find({}, (err, data) => {
err ? res.status(404).json({ message: 'Banners not found' }) : res.status(200).json(data);
});
};
<file_sep>/api/controllers/size.controller.js
const size = require('../models/size.model');
// Get all size -- find(query, projection)
module.exports.getSizes = (req, res) => {
size.find({ status: true }, (err, sizes) => {
err ? res.status(500).json({ message: 'size not found' }) : res.status(200).json({ sizes });
});
};
<file_sep>/api/routers/admin.router.js
const admin_controller = require('../controllers/admin.controller');
const order_controller = require('../controllers/order.controller');
const product_controller = require('../controllers/product.controller');
const upload = require('../config/multer');
module.exports = app => {
//product
app.route('/admin/addproduct').post(upload.array('files'), admin_controller.addProduct);
app.route('/admin/updateproduct').patch(upload.array('files'), admin_controller.updateProduct);
app.route('/admin/deleteproduct/:id').patch(admin_controller.deleteProduct);
app.route('/admin/getallproduct/:page').get(admin_controller.getAllProducts);
app.route('/admin/product/updateprice').post(product_controller.updatePriceByCategory);
//brand
app.route('/admin/addbrand').post(admin_controller.addBrand);
app.route('/admin/updatebrand').put(admin_controller.updateBrand);
app.route('/admin/deletebrand/:id').put(admin_controller.deleteBrand);
app.route('/admin/getallbrand/:page').get(admin_controller.getAllBrands);
//category
app.route('/admin/addcategory').post(admin_controller.addCategory);
app.route('/admin/updatecategory').put(admin_controller.updateCategory);
app.route('/admin/deletecategory/:id').put(admin_controller.deleteCategory);
app.route('/admin/getallcategory/:page').get(admin_controller.getAllCategorys);
//user
app.route('/admin/adduser').post(admin_controller.addUser);
app.route('/admin/deleteuser/:id').put(admin_controller.deleteUser);
app.route('/admin/updateuser').put(admin_controller.updateUser);
app.route('/admin/getAllUser/:page').get(admin_controller.getAllUsers);
app.route('/admin/getusers').get(admin_controller.getUsers);
app.route('/admin/login').post(admin_controller.login);
//order
app.route('/admin/order/:id').put(order_controller.deleteOrder);
app.route('/admin/order/byday').get(order_controller.getOrderByDay);
app.route('/admin/order/bymonth').get(order_controller.getOrderByMonth);
app.route('/admin/order/byyear/:year').get(order_controller.getOrderByYear);
app.route('/admin/order/top10').get(order_controller.getOrderTop10);
app.route('/admin/order/quantitybyyear/:year').get(order_controller.getQuantityByYear);
app.route('/admin/order/subtotal/:year').get(order_controller.getOrderSubTotalByYear);
app.route('/admin/order/subtotalcategory').post(
order_controller.getOrderSubTotalByYearAndCategory,
);
app.route('/admin/order/yearandcategory').post(order_controller.getQuantityByYearAndCategory);
app.route('/admin/order/countorder').post(order_controller.getQuantityOrderByYearAndCategory);
//stock
app.route('/admin/addstock').post(admin_controller.addStock);
app.route('/admin/updatestock').put(admin_controller.updateStock);
app.route('/admin/deletestock/:id').put(admin_controller.deleteStock);
app.route('/admin/getallstock/:page').get(admin_controller.getAllStocks);
//size
app.route('/admin/addsize').post(admin_controller.addSize);
app.route('/admin/updatesize').put(admin_controller.updateSize);
app.route('/admin/deletesize/:id').put(admin_controller.deleteSize);
app.route('/admin/getsizes/:page').get(admin_controller.getAllSizes);
//color
app.route('/admin/addcolor').post(admin_controller.addColor);
app.route('/admin/updatecolor').put(admin_controller.updateColor);
app.route('/admin/deletecolor/:id').put(admin_controller.deleteColor);
app.route('/admin/getcolors/:page').get(admin_controller.getAllColors);
//promotion code
app.route('/admin/promocodes/add').post(admin_controller.addPromotionCode);
app.route('/admin/promocodes/update').put(admin_controller.updatePromoCode);
app.route('/admin/promocodes').get(admin_controller.getPromoCodes);
//banner
app.route('/admin/banners/add').post(admin_controller.addBanner);
app.route('/admin/banners/update').put(admin_controller.updateBanner);
app.route('/admin/banners/:id').get(admin_controller.getBanner);
app.route('/admin/banners').get(admin_controller.getBanners);
};
<file_sep>/api/models/product.model.js
const mongoose = require('mongoose');
const Schema = mongoose.Schema;
const sizeProductSchema = require('../models/size_product').schema;
const colorProductSchema = require('../models/color_product').schema;
const product = new Schema({
name: {
type: String,
required: [true, 'Không được bỏ trống'],
},
price: {
type: Number,
required: [true, 'Không được bỏ trống'],
},
sellPrice: Number,
id_category: {
type: Schema.Types.ObjectId,
ref: 'category',
},
quantity: {
type: Number,
        required: true,
},
images: {
type: Array,
default: [],
},
id_brand: {
type: Schema.Types.ObjectId,
ref: 'brand',
},
description: {
type: String,
required: [true, 'Không được bỏ trống'],
},
colorProducts: {
type: Schema.Types.ObjectId,
ref: 'colorproduct',
},
sizeProducts: {
type: Schema.Types.ObjectId,
ref: 'sizeproduct',
},
numReviews: Number,
rating: Number,
status: {
type: Boolean,
default: true,
},
});
module.exports = mongoose.model('product', product);
<file_sep>/api/models/image_product.js
const mongoose = require('mongoose');
const Schema = mongoose.Schema;
const imageProduct = new mongoose.Schema({
images: {
type: Array,
default: [],
},
product: {
type: Schema.Types.ObjectId,
ref: 'product',
},
});
module.exports = mongoose.model('image_product', imageProduct);
<file_sep>/api/models/promocode.model.js
const mongoose = require('mongoose');
const Schema = mongoose.Schema;
const promocode = new Schema({
promotion_code: String,
status: {
type: Boolean,
default: true,
},
content: String,
price_discount: Number,
});
module.exports = mongoose.model('promocode', promocode);
<file_sep>/api/routers/comment.router.js
const comment_controller = require('../controllers/comment.controller');
module.exports = app => {
app.route('/comment/:id').get(comment_controller.getComment);
app.route('/comment').put(comment_controller.updateComment);
app.route('/comment/:id').put(comment_controller.deleteComment);
};
| 3f4ee74e906ba4c0c368ea50e32cd5185ca9628f | [
"JavaScript"
] | 34 | JavaScript | MinhPhu1999/tlcn_mern | d8759debfc9604dc49bd95c160dc877411c384a4 | df1d318b4b4185f32c214f13fc45fe6f64cab3df | |
refs/heads/master | <repo_name>osu361/OSU_Calculator<file_sep>/setup.py
from setuptools import setup, find_packages
setup(
name='OSU_Calculator',
version='1.0',
packages=find_packages(),
url='https://github.com/osu361/OSU_Calculator',
license='',
author='team32',
author_email='',
description='Computer Science Calculator', install_requires=['texteditor']
)
<file_sep>/README.md
# OSU_Calculator
## **To run the Tkinter GUI on the engineering servers you need to be able to open an X11 terminal remotely**
### To Open X11 Terminal Remotely From Windows Using Cygwin
*(reference: https://x.cygwin.com/docs/ug/using-remote-apps.html)*
Make sure you have an X client installed (e.g., via Cygwin)
1. Modify the cygwin/etc/sshd_config file so the following variables are set:
* X11Forwarding yes
* X11DisplayOffset 10
* X11UseLocalhost yes
2. Launch an xterm in Windows
3. Enter: export DISPLAY=:0.0
4. Enter: ssh -Y username@remote host
*To install the necessary packages please run the setup.py file:*

python3 setup.py install

*depending on the permissions you might need to elevate using* sudo or other admin privileges

*please make sure that you're using a version of Python with tkinter*
*To test if successful Enter:*
python3 -m tkinter
This should launch a tkinter test window
*To Launch multiple Xterms on the engineering server Enter:* xterm &
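*To run the calculator itself (assuming you launch it from the repository root) Enter:* python3 calculator.py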
<file_sep>/calculator.py
# Python program to create a simple GUI
# calculator using Tkinter
# References:
# https://www.geeksforgeeks.org/python-simple-gui-calculator-using-tkinter/
# https://medium.com/@adeyinkaadegbenro/project-build-a-python-gui-calculator-fc92bddb744d
# import everything from tkinter module
from tkinter import *
import tkinter.font as font # added this
from math import *
import texteditor
# A global constant of sorts. The number of columns in the calculator
NUM_COLUMNS = 4
BTN_BG_COLOR = "black"
BTN_TXT_COLOR = "gray"
CALC_BG_COLOR = "black"
OPERATOR_LIST = {"urnary": ["log"], "binary": ["+", "-", "*", "/"]}
MAX_UNDO_REDO_STACK = 40
# EXAMPLE: class helloworld
class HelloWorld:
def __init__(self):
self.message = "Hello World!"
def printMessage(self, equation):
equation.set(self.message)
class Mathematics:
def basic(self, expression):
return str(eval(expression))
def log10(self, expression):
try:
floatValue = float(expression)
result = log10(floatValue)
strVal = format("%8g" % (result))
except:
result = "error"
return strVal
class UnitConversion:
# Convert from lbs to kg
def toKg(self, expression):
conversionFactor = 0.453592
try:
floatValue = float(expression)
result = conversionFactor*floatValue
strVal = format("%8g" % (result))
except:
result = "error"
return strVal
# Convert from kg to lbs
def toLbs(self, expression):
conversionFactor = 2.20462
try:
floatValue = float(expression)
result = conversionFactor*floatValue
strVal = format("%8g" % (result))
except:
result = "error"
return strVal
# Convert from from meters to feet
def toFt(self, expression):
conversionFactor = 3.28084
try:
floatValue = float(expression)
result = conversionFactor*floatValue
strVal = format("%8g" % (result))
except:
result = "error"
return strVal
# Convert from from feet to meters
def toM(self, expression):
conversionFactor = 0.3048
try:
floatValue = float(expression)
result = conversionFactor*floatValue
strVal = format("%8g" % (result))
except:
result = "error"
return strVal
# Convert from from kilometers to miles
def toMi(self, expression):
conversionFactor = 0.621371
try:
floatValue = float(expression)
result = conversionFactor*floatValue
strVal = format("%8g" % (result))
except:
result = "error"
return strVal
# Convert from from miles to kilometers
def toKm(self, expression):
conversionFactor = 1.60934
try:
floatValue = float(expression)
result = conversionFactor*floatValue
strVal = format("%8g" % (result))
except:
result = "error"
return strVal
# convert Celsius to Fahrenheit
def toFdeg(self, expression):
try:
res = (float(expression) * 1.8) + 32
strVal = format("%8g" % (res))
except:
res = "error"
return strVal
# convert Fahrenheit to Celsius
def toCdeg(self, expression):
try:
res = (float(expression) - 32) * (5/9)
strVal = format("%8g" % (res))
except:
res = "error"
return strVal
# convert Gallon to Liter
def toLiter(self, expression):
try:
res = float(expression) * 3.78541
strVal = format("%8g" % (res))
except:
res = "error"
return strVal
# convert Liter to Gallon
def toGal(self, expression):
try:
res = float(expression) / 3.78541
strVal = format("%8g" % (res))
except:
res = "error"
return strVal
# convert Inch to Centimeter
def toCm(self, expression):
try:
res = float(expression) * 2.54
strVal = format("%8g" % (res))
except:
res = "error"
return strVal
    # convert Centimeter to Inch
def toIn(self, expression):
try:
res = float(expression) / 2.54
strVal = format("%8g" % (res))
except:
res = "error"
return strVal
class undoRedo:
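    """Maintain bounded undo/redo stacks (capped at MAX_UNDO_REDO_STACK entries) of calculator actions."""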
def __init__(self):
self.undo_Stack = []
self.redo_Stack = []
def get_top_undo(self):
"""function to get top of undo stack"""
if len(self.undo_Stack) > 0:
return self.undo_Stack[-1]
else:
return None
def addItemUndo(self, element_to_add, flag=True):
"""function to add new item to the undo stack"""
self.undo_Stack.append(element_to_add)
self.trim_stack(flag)
def addOperationUndo(self, operand1, operand2, operator, flag=True):
"""function to add a complete operation to the undo stack"""
operation = {'num1': operand1, 'num2': operand2, 'op': operator}
self.undo_Stack[-1] = operation
self.trim_stack(flag)
def updateUndo(self, element_to_add):
"""function to update the top of the undo stack with the new appropriate value"""
self.undo_Stack[-1] = element_to_add
def userUndo(self):
"""Function to undo a previous action and load the undone action to the redo stack"""
self.redo_Stack.append(self.undo_Stack.pop())
self.trim_stack(False)
if len(self.undo_Stack) > 0:
return self.undo_Stack[-1]
else:
return None
def userRedo(self):
"""function to redo an action that was previously undone"""
if len(self.redo_Stack) == 0:
return None
grabbed = self.redo_Stack.pop()
self.undo_Stack.append(grabbed)
self.trim_stack(False)
return grabbed
def trim_stack(self, clear_redo):
"""Function to delete the oldest entries on the stack if the stack becomes too large"""
if len(self.undo_Stack) > MAX_UNDO_REDO_STACK:
self.undo_Stack = self.undo_Stack[1:]
if len(self.redo_Stack) > MAX_UNDO_REDO_STACK:
self.redo_Stack = self.redo_Stack[1:]
if clear_redo:
self.redo_Stack.clear()
class Calculator:
def __init__(self, master):
self.master = master
# set the background colour of GUI window
self.master.configure(background=CALC_BG_COLOR)
# screen font 30
self.screenFont = font.Font(weight="bold", size=30) # added this
# button font 20
self.buttonFont = font.Font(weight="bold", size=20) # added this
# Calculation log font 12
self.buttonFont2 = font.Font(weight="bold", size=12)
# use object instance to access math functions in the Mathematics class
self.my_math = Mathematics()
# EXAMPLE: Create a HelloWorld class object to call the HelloWorld class functions
self.my_hello = HelloWorld()
# Creat a Conversion class object to call the Conversion class functions
self.my_unitConvert = UnitConversion()
# Create an Undo/Redo class object for undo/redo functionality
self.URstacks = undoRedo()
# set the title of GUI window
self.master.title("Calculator")
# set the configuration of GUI window
# According to Geeks for Geeks the below geomtry declaration is not necessary and imposes predefined
# size limits which would introduce the need for hard coding
# https://www.geeksforgeeks.org/python-geometry-method-in-tkinter/
# reference: https://www.geeksforgeeks.org/python-grid-method-in-tkinter/
# self.master.geometry("265x125")
# self.master.geometry("400x300")
self.numColumns = NUM_COLUMNS # use this to change the number of columns
# example for adding other functionality
self.Flag = ""
# will hold the expression entered by the user
self.expression = ""
self.display_text = ""
self.expFiltered = "" # holds value with zeros removed
# for more information on tkinter variables see:
# https://www.geeksforgeeks.org/python-setting-and-retrieving-values-of-tkinter-variable/
# StringVar() is the variable class
# we create an instance of this class
# this holds a variable in a class from ktinker
self.equation = StringVar()
# create the text entry box for
# showing the expression .
self.expression_field = Entry(
self.master, textvariable=self.equation, font=self.screenFont)
# grid method is used for placing
# the widgets at respective positions
# in table like structure .
Grid.rowconfigure(self.master, 1, weight=1)
self.expression_field.grid(
columnspan=self.numColumns, ipadx=70, sticky=(N, S, E, W))
# variables to save user entered operands/operator
self.operands = [None, None]
self.e_operands = [None, None]
self.operator = None
# Flag used to prohibit user from using two decimals in one number
self.is_decimal = [False, False]
self.clear_stack = False
# Variables used to save answer
self.saved_answer = None
self.previous_answer = None
self.equation.set('0')
# history of all calculations for session
self.history = []
self.calculationLog()
Grid.rowconfigure(self.master, 1, weight=1)
# Label(self.master, text="calculation log").grid(row=1, column=0,
# columnspan=2, sticky=(N, S, E, W))
Button(self.master, text='<--use', fg=BTN_TXT_COLOR, bg=BTN_BG_COLOR,
command=self.useLog, width=7, height=1).grid(
row=1, column=2, sticky=(N, S, E, W))
Button(self.master, text='hist', fg=BTN_TXT_COLOR, bg=BTN_BG_COLOR,
command=self.showHistory, width=7, height=1).grid(
row=1, column=3, sticky=(N, S, E, W))
# buttons for unit conversion:
kg_button = "\u2b62" + " kg"
lbs_button = "\u2b62" + " lbs"
ft_button = "\u2b62" + " ft"
m_button = "\u2b62" + " m"
mi_button = "\u2b62" + " mi"
km_button = "\u2b62" + " km"
cDeg_button = "\u2b62 \u00B0" + "C"
fDeg_button = "\u2b62 \u00B0" + "F"
liter_button = "\u2b62" + " L"
gal_button = "\u2b62" + " gal"
in_button = "\u2b62" + " in"
cm_button = "\u2b62" + " cm"
self.equation.set('')
self.buttonList = [
Button(self.master, text='save', fg=BTN_TXT_COLOR, bg=BTN_BG_COLOR,
command=lambda: self.saveAnswer(), width=7, height=1),
Button(self.master, text='load', fg=BTN_TXT_COLOR, bg=BTN_BG_COLOR,
command=lambda: self.loadAnswer(), width=7, height=1),
Button(self.master, text='clr save', fg=BTN_TXT_COLOR, bg=BTN_BG_COLOR,
command=lambda: self.clearAnswer(), width=7, height=1),
Button(self.master, text=u"\u232B", fg=BTN_TXT_COLOR,
bg=BTN_BG_COLOR, command=self.clear, width=7, height=1),
Button(self.master, text=' ( ', fg=BTN_TXT_COLOR, bg=BTN_BG_COLOR,
command=lambda: self.press('('), width=7, height=1),
Button(self.master, text=' ) ', fg=BTN_TXT_COLOR, bg=BTN_BG_COLOR,
command=lambda: self.press(')'), width=7, height=1),
Button(self.master, text=' UNDO ', fg=BTN_TXT_COLOR, bg=BTN_BG_COLOR,
command=lambda: self.undo(), width=7, height=1),
Button(self.master, text=' REDO ', fg=BTN_TXT_COLOR, bg=BTN_BG_COLOR,
command=lambda: self.redo(), width=7, height=1),
Button(self.master, text=' 7 ', fg=BTN_TXT_COLOR, bg=BTN_BG_COLOR,
command=lambda: self.press('7'), width=7, height=1),
Button(self.master, text=' 8 ', fg=BTN_TXT_COLOR, bg=BTN_BG_COLOR,
command=lambda: self.press('8'), width=7, height=1),
Button(self.master, text=' 9 ', fg=BTN_TXT_COLOR, bg=BTN_BG_COLOR,
command=lambda: self.press('9'), width=7, height=1),
Button(self.master, text=u"\u00F7", fg=BTN_TXT_COLOR, bg=BTN_BG_COLOR,
command=lambda: self.press("/"), width=7, height=1),
Button(self.master, text=' 4 ', fg=BTN_TXT_COLOR, bg=BTN_BG_COLOR,
command=lambda: self.press('4'), width=7, height=1),
Button(self.master, text=' 5 ', fg=BTN_TXT_COLOR, bg=BTN_BG_COLOR,
command=lambda: self.press('5'), width=7, height=1),
Button(self.master, text=' 6 ', fg=BTN_TXT_COLOR, bg=BTN_BG_COLOR,
command=lambda: self.press('6'), width=7, height=1),
Button(self.master, text=' * ', fg=BTN_TXT_COLOR, bg=BTN_BG_COLOR,
command=lambda: self.press("*"), width=7, height=1),
Button(self.master, text=' 1 ', fg=BTN_TXT_COLOR, bg=BTN_BG_COLOR,
command=lambda: self.press('1'), width=7, height=1),
Button(self.master, text=' 2 ', fg=BTN_TXT_COLOR, bg=BTN_BG_COLOR,
command=lambda: self.press('2'), width=7, height=1),
Button(self.master, text=' 3 ', fg=BTN_TXT_COLOR, bg=BTN_BG_COLOR,
command=lambda: self.press('3'), width=7, height=1),
Button(self.master, text=' - ', fg=BTN_TXT_COLOR, bg=BTN_BG_COLOR,
command=lambda: self.press("-"), width=7, height=1),
Button(self.master, text=' . ', fg=BTN_TXT_COLOR, bg=BTN_BG_COLOR,
command=lambda: self.press("."), width=7, height=1),
Button(self.master, text=' 0 ', fg=BTN_TXT_COLOR, bg=BTN_BG_COLOR,
command=lambda: self.press('0'), width=7, height=1),
Button(self.master, text='E', fg=BTN_TXT_COLOR, bg=BTN_BG_COLOR,
command=lambda: self.press('E'), width=7, height=1),
Button(self.master, text=' + ', fg=BTN_TXT_COLOR, bg=BTN_BG_COLOR,
command=lambda: self.press("+"), width=7, height=1),
Button(self.master, text='log', fg=BTN_TXT_COLOR, bg=BTN_BG_COLOR,
command=lambda: self.setFlag("log"), width=7, height=1),
Button(self.master, text=kg_button, fg=BTN_TXT_COLOR, bg=BTN_BG_COLOR,
command=lambda: self.unitConvert("Kg"), width=7, height=1),
Button(self.master, text=lbs_button, fg=BTN_TXT_COLOR, bg=BTN_BG_COLOR,
command=lambda: self.unitConvert("Lbs"), width=7, height=1),
Button(self.master, text=ft_button, fg=BTN_TXT_COLOR, bg=BTN_BG_COLOR,
command=lambda: self.unitConvert("Ft"), width=7, height=1),
Button(self.master, text=m_button, fg=BTN_TXT_COLOR, bg=BTN_BG_COLOR,
command=lambda: self.unitConvert("M"), width=7, height=1),
Button(self.master, text=mi_button, fg=BTN_TXT_COLOR, bg=BTN_BG_COLOR,
command=lambda: self.unitConvert("Mi"), width=7, height=1),
Button(self.master, text=km_button, fg=BTN_TXT_COLOR, bg=BTN_BG_COLOR,
command=lambda: self.unitConvert("Km"), width=7, height=1),
Button(self.master, text=cDeg_button, fg=BTN_TXT_COLOR, bg=BTN_BG_COLOR,
command=lambda: self.unitConvert("cDeg"), width=7, height=1),
Button(self.master, text=fDeg_button, fg=BTN_TXT_COLOR, bg=BTN_BG_COLOR,
command=lambda: self.unitConvert("fDeg"), width=7, height=1),
Button(self.master, text=liter_button, fg=BTN_TXT_COLOR, bg=BTN_BG_COLOR,
command=lambda: self.unitConvert("liter"), width=7, height=1),
Button(self.master, text=gal_button, fg=BTN_TXT_COLOR, bg=BTN_BG_COLOR,
command=lambda: self.unitConvert("gal"), width=7, height=1),
Button(self.master, text=cm_button, fg=BTN_TXT_COLOR, bg=BTN_BG_COLOR,
command=lambda: self.unitConvert("cm"), width=7, height=1),
Button(self.master, text=in_button, fg=BTN_TXT_COLOR, bg=BTN_BG_COLOR,
command=lambda: self.unitConvert("in"), width=7, height=1),
Button(self.master, text='+/-', fg=BTN_TXT_COLOR, bg=BTN_BG_COLOR,
command=lambda: self.press("negative"),
width=7, height=1),
# EXAMPLE: add helloworld button
# Button(self.master, text='HW', fg=BTN_TXT_COLOR, bg=BTN_BG_COLOR,
# command=lambda: self.my_hello.printMessage(self.equation),
# width=7, height=1),
Button(self.master, text=' = ', fg=BTN_TXT_COLOR,
bg=BTN_BG_COLOR, command=self.equalpress, width=28, height=1,
font=self.buttonFont),
]
self.lengthOfbuttonList = len(self.buttonList)
# button rows start at row 2: row 0 holds the display and row 1 holds the calculation log
self.numRows = self.lengthOfbuttonList // self.numColumns + 2
index = 0
for row in range(2, self.numRows):
Grid.rowconfigure(self.master, row, weight=1) # sticky
for column in range(self.numColumns):
Grid.columnconfigure(self.master, column, weight=1) # sticky
self.buttonList[index].grid(
row=row, column=column, sticky=(N, S, E, W)) # sticky
# print("row= ",row, " column= ", column)
index += 1
print("after row-cols full index = ", index)
row += 1
column = 0
Grid.rowconfigure(self.master, row, weight=1) # sticky
for i in range(index, self.lengthOfbuttonList - 1):
self.buttonList[index].grid(
row=row, column=column, sticky=(N, S, E, W)) # sticky
# print("row= ",row, " column= ", column)
index += 1
column += 1
# assign the "=" button to its own row at the bottom of the calculator
row += 1
Grid.rowconfigure(self.master, row, weight=1) # sticky
print("length of button list = ", self.lengthOfbuttonList)
print("index= ", index)
self.buttonList[self.lengthOfbuttonList-1].grid(
row=row, column=0, columnspan=self.numColumns, sticky=(N, S, E, W))
# listen for enter key
self.master.bind('<Return>', self.enterKey)
def setFlag(self, flag):
self.Flag = flag
self.press(flag)
# Function to update expression
# in the text entry box
def press(self, num: str):
# point out the global expression variable
# global expression
# Refactoring for building operands
is_operand_data = num.isnumeric() or num == "negative"
is_operand_data = is_operand_data or num == "."
# TODO convert E button to operator and add sign button to enable #E-# functionality
is_operand_data = is_operand_data or num == "E"
if is_operand_data:
if self.operator is None:
self.set_operand(0, num)
else:
self.set_operand(1, num)
elif num in OPERATOR_LIST["urnary"]:
self.eval_existing_expression("urnary", num)
elif num in OPERATOR_LIST["binary"]:
self.eval_existing_expression("binary", num)
else:
self.displayError()
self.build_display_text()
"""
# concatenation of string
self.expression += str(num)
# update the expression by using set method
self.equation.set(self.expression)
"""
def build_display_text(self):
if self.operands[0] is not None:
self.display_text = self.operands[0]
if self.e_operands[0] is not None:
self.display_text += "E" + self.e_operands[0]
if self.operator is not None:
self.display_text += self.operator
if self.operands[1] is not None:
self.display_text += self.operands[1]
if self.e_operands[1] is not None:
self.display_text += "E" + self.e_operands[1]
self.equation.set(self.display_text)
elif self.previous_answer is not None:
self.display_text = self.previous_answer
self.equation.set(self.display_text)
else:
self.displayError()
def eval_existing_expression(self, operator_type, operator):
if self.operands[1] is not None:
# TODO load OP[0] operator and OP[1] flag for second add
self.equalpress(operator_type != "binary")
self.operands[0] = self.previous_answer
if self.operands[0] is None and self.previous_answer is not None:
self.operands[0] = self.previous_answer
if self.operands[0] is not None:
# TODO load operator
if operator_type == "urnary":
# TODO and if not flag load OP[0]
self.equalpress()
else:
self.set_operator(operator)
self.URstacks.addItemUndo(operator)
else:
self.displayError()
def set_operand(self, idx, num):
if num != "." or not self.is_decimal[idx]:
if self.operands[idx] is None: # TODO op[0] == 0
# TODO Load/update current val of operand
self.operands[idx] = num
self.URstacks.addItemUndo(self.operands[idx])
else:
if num == "negative":
if self.e_operands[idx] is not None:
if self.e_operands[idx][0] == '-':
self.e_operands[idx] = self.e_operands[idx][1:]
else:
self.e_operands[idx] = "-" + self.e_operands[idx]
else:
if self.operands[idx][0] == '-':
self.operands[idx] = self.operands[idx][1:]
else:
self.operands[idx] = "-" + self.operands[idx]
elif num == "E":
add_E_to_operand = len(
self.operands[idx]) > 0 and self.operands[idx][0].isnumeric()
add_E_to_operand = add_E_to_operand or len(
self.operands[idx]) > 1 and self.operands[idx][0] == "-"
# add_E_to_operand = add_E_to_operand and self.e_index < 0
if add_E_to_operand:
self.e_operands[idx] = "0"
else:
if self.e_operands[idx] is not None:
if self.e_operands[idx][0] == "0":
self.e_operands[idx] = num
elif self.e_operands[idx][0] == "-" and (len(self.e_operands[idx]) == 1 or self.e_operands[idx][1] == "0"):  # operand is just "-" or "-0"
self.e_operands[idx] = self.e_operands[idx][0] + num
else:
self.e_operands[idx] += num
else:
if self.operands[idx][0] == "0":
self.operands[idx] = num
elif self.operands[idx][0] == "-" and (len(self.operands[idx]) == 1 or self.operands[idx][1] == "0"):  # operand is just "-" or "-0"
self.operands[idx] = self.operands[idx][0] + num
else:
self.operands[idx] += num
if self.e_operands[idx] is None:
self.URstacks.updateUndo(self.operands[idx])
else:
self.URstacks.updateUndo(
self.operands[idx] + "E" + self.e_operands[idx])
else:
self.displayError("invalid key")
def set_operator(self, operator):
self.operator = operator
self.e_index = -1
# Function to evaluate the final expression
def equalpress(self, ignore_flag=False):
# Try and except statement is used
# for handling the errors like zero
# division error etc.
# Put that code inside the try block
# which may generate the error
#self.expression_field = self.fixZeros(self.expression_field.get())
# removes leading zeros
self.expFiltered = self.fixZeros(self.expression_field.get())
try:
# global expression
# eval function evaluate the expression
# and str function convert the result
# into string
if self.Flag == "log" and not ignore_flag: # example of implementing a function
#total = self.my_math.log10(self.expression_field.get())
total = self.my_math.log10(self.expFiltered)
self.expFiltered = self.expFiltered + self.Flag
self.Flag = ""
else:
# eval takes a string expression and evaluates it
#total = self.my_math.basic(self.expression_field.get())
total = self.my_math.basic(self.expFiltered)
floatValue = float(total)
strVal = format("%8g" % (floatValue))
total = strVal.strip()
self.equation.set(total)
# save equations and answers to self.history list
self.saveHistory()
self.calculationLog()
if self.operands[1] is not None:
temp_num1 = self.operands[0]
if self.e_operands[0] is not None:
temp_num1 += "E" + self.e_operands[0]
temp_num2 = self.operands[1]
if self.e_operands[1] is not None:
temp_num2 += "E" + self.e_operands[1]
self.URstacks.addOperationUndo(
temp_num1, temp_num2, self.operator)
self.previous_answer = total # save result of operation to previous answer variable
self.operands[0] = self.previous_answer
self.URstacks.addItemUndo(self.operands[0])
# reset the stored operands and operator for the next expression
self.operands[0] = None
self.e_operands[0] = None
self.operator = None
self.operands[1] = None
self.e_operands[1] = None
# if an error is generated it is handled by the except block
except:
self.displayError()
# Function to convert the value currently on the display to the selected unit
def unitConvert(self, buttonName):
try:
if (buttonName == "Kg"):
self.expression = self.my_math.basic(
self.expression_field.get())
result = self.my_unitConvert.toKg(self.expression)
self.equation.set(result + " (kg)")
elif (buttonName == "Lbs"):
self.expression = self.my_math.basic(
self.expression_field.get())
result = self.my_unitConvert.toLbs(self.expression)
self.equation.set(result + " (lbs)")
elif (buttonName == "Ft"):
self.expression = self.my_math.basic(
self.expression_field.get())
result = self.my_unitConvert.toFt(self.expression)
self.equation.set(result + " (ft)")
elif (buttonName == "M"):
self.expression = self.my_math.basic(
self.expression_field.get())
result = self.my_unitConvert.toM(self.expression)
self.equation.set(result + " (m)")
elif (buttonName == "Mi"):
self.expression = self.my_math.basic(
self.expression_field.get())
result = self.my_unitConvert.toMi(self.expression)
self.equation.set(result + " (mi)")
elif (buttonName == "Km"):
self.expression = self.my_math.basic(
self.expression_field.get())
result = self.my_unitConvert.toKm(self.expression)
self.equation.set(result + " (km)")
elif (buttonName == "fDeg"):
self.expression = self.my_math.basic(
self.expression_field.get())
result = self.my_unitConvert.toFdeg(self.expression)
self.equation.set(result + " (\u2b62 \u00B0" + "F)")
elif (buttonName == "cDeg"):
self.expression = self.my_math.basic(
self.expression_field.get())
result = self.my_unitConvert.toCdeg(self.expression)
self.equation.set(result + " (\u2b62 \u00B0" + "C)")
elif (buttonName == "liter"):
self.expression = self.my_math.basic(
self.expression_field.get())
result = self.my_unitConvert.toLiter(self.expression)
self.equation.set(result + " (L)")
elif (buttonName == "gal"):
self.expression = self.my_math.basic(
self.expression_field.get())
result = self.my_unitConvert.toGal(self.expression)
self.equation.set(result + " (gal)")
elif (buttonName == "cm"):
self.expression = self.my_math.basic(
self.expression_field.get())
result = self.my_unitConvert.toCm(self.expression)
self.equation.set(result + " (cm)")
elif (buttonName == "in"):
self.expression = self.my_math.basic(
self.expression_field.get())
result = self.my_unitConvert.toIn(self.expression)
self.equation.set(result + " (in)")
# self.equation.set(result)
self.expFiltered = self.expression
self.saveHistory()
self.calculationLog()
self.previous_answer = result # save result of operation to previous answer variable
self.operands[0] = self.previous_answer
# initialize the expression variable to an empty string
self.expression = ""
self.Flag = ""
self.operator = None
self.operands[1] = None
except:
self.displayError()
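# Function to clear the contents of the text entry box and reset the stored operands/operator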
def clear(self):
# global expression
self.operands[0] = None
self.operands[1] = None
self.e_operands[0] = None
self.e_operands[1] = None
self.operator = None
self.clear_stack = not self.clear_stack # logic for stack size
self.expression = ""
self.Flag = ""
self.equation.set("")
# Function to save the previous answer for later use
def saveAnswer(self):
self.saved_answer = self.previous_answer
# Function to load a previously saved answer, displays an error if no saved answer exists
def loadAnswer(self):
if self.saved_answer is None:
self.displayError()
else:
self.expression = str(self.saved_answer)
self.equation.set(self.expression)
# function to erase the previously saved answer
def clearAnswer(self):
self.saved_answer = None
# function to display error to the screen if the user tries to perform illegal operations
def displayError(self, msg=" error "):
self.equation.set(msg)
self.expression = ""
def enterKey(self, event):
self.equalpress()
# removes leading zeros
def fixZeros(self, s):
s = list(s)
i = 0
while True:
if i == 0:
if len(s) > 1 and s[i] == '0' and s[i+1].isdigit():  # length guard avoids IndexError on short input
s.pop(i)
i -= 1
elif i == len(s)-1 or len(s) < 3:
break
elif s[i] == '0' and not s[i-1].isdigit() and s[i+1].isdigit():
s.pop(i)
i -= 1
i += 1
return "".join(s)
# save expression and answer to self.history
def saveHistory(self):
if self.expFiltered != self.equation.get():
self.history.append(self.expFiltered + "=" + self.equation.get())
# write self.history to file
def history2Txt(self):
# create output file
out_file = open("calc_history.txt", "w")
# add elements in self.history to .txt file
for i in range(len(self.history)):
out_file.write(self.history[i] + '.\n')
out_file.close()
# opens calc_history.txt file using the systems default editor
def showHistory(self):
# write self.history to file
self.history2Txt()
# uses texteditor function to open file with default editor
texteditor.open(filename='calc_history.txt')
def calculationLog(self, *args):
if not self.history:
self.options = ['Calculation Log']
else:
self.options = self.history
self.variable = StringVar(self.master)
self.variable.set(self.options[-1]) # default value
b5 = OptionMenu(self.master, self.variable, *self.options)
b5.config(font=self.buttonFont2, width=10)
b5.grid(row=1, column=0, columnspan=2, sticky=(N, S, E, W))
def useLog(self):
self.clear()
eq = self.variable.get().split('=')[1]
eq = eq.split('(')[0]
self.expression = eq
self.equation.set(eq)
self.equalpress()
def undo(self):
"""undoes the mose recent action"""
result = self.URstacks.userUndo()
if type(result) == dict: # if the most recent action cleared previous operands/operator reload them
temp_num1 = result["num1"]
temp_num2 = result["num2"]
self.operator = temp_op = result["op"]
self.previous_answer = str(eval(temp_num1 + temp_op + temp_num2))
self.load_operand(temp_num1, 0)
self.load_operand(temp_num2, 1)
elif result is not None: # load appropriate data from the result
if result in OPERATOR_LIST["binary"]:
self.operands[1] = None
elif self.operands[1] is not None:
self.operands[1] = None
elif self.operator is not None:
self.operator = None
else:
self.load_operand(result, 0)
else: # clear data if undoing the first action
if self.operands[0] is not None and self.operator is None:
self.operands[0] = None
self.previous_answer = 0
elif self.operator is not None and self.operands[1] is None:
self.operator = None
else:
self.displayError("nothing to undo")
self.build_display_text()
def redo(self):
"""redoes an undone action"""
ref = self.URstacks.get_top_undo()
result = self.URstacks.userRedo()
if result is not None: # if action to be redone redo it
if type(ref) == dict: # if previous action is an operation load op1 from result
self.load_operand(result, 0)
self.operator = None
self.operands[1] = None
else: # load appropriate data from result
if type(result) == dict:
temp_num1 = result["num1"]
temp_num2 = result["num2"]
self.operator = result["op"]
self.load_operand(temp_num1, 0)
self.load_operand(temp_num2, 1)
elif result in OPERATOR_LIST["binary"]:
self.operator = result
elif self.operator is not None:
self.load_operand(result, 1)
else:
self.load_operand(result, 0)
self.build_display_text()
else:
self.displayError("nothing to redo")
def load_operand(self, raw_data, idx):
"""function to load operand so E notation is properly loaded"""
if "E" in raw_data or "e" in raw_data:
raw_data = raw_data.upper().split("E")
self.operands[idx] = raw_data[0]
self.e_operands[idx] = raw_data[1]
else:
self.operands[idx] = raw_data
self.e_operands[idx] = None
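# Illustration with hypothetical values: load_operand("1.5E-3", 0) stores operands[0]="1.5"
# and e_operands[0]="-3"; load_operand("42", 1) stores operands[1]="42", e_operands[1]=None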
# Driver code
if __name__ == "__main__":
# create a GUI window
root = Tk()
root.geometry("300x300")
my_gui = Calculator(root)
root.mainloop()
| 9168dd29e5a5f78efe7d00001e0491e2aa4e7b28 | [
"Markdown",
"Python"
] | 3 | Python | osu361/OSU_Calculator | ead927a4558bc85014e39d6aa51854800bc50061 | ba7cfc9bbcb356d1850f3b6e66d36e6e9c78dcd3 | |
refs/heads/master | <repo_name>khizaear/payapp<file_sep>/README.md
SandGlass
=========
<file_sep>/routes/index.js
var express = require('express');
var router = express.Router();
var httpreq = require('httpreq');
var services=require('./services');
var MongoClient = require('mongodb').MongoClient;
var process=require('process')
/* GET home page. */
router.get('/', function(req, res, next) {
if (process.pid) {
console.log('This process is your pid ' + process.pid);
console.log('This platform is ' + process.platform);
}
var output=services.getcomponents();
res.render('default', { data:output, user:services.getuser()});
//console.log(output);
});
router.get('/profile/search/:id',function(req, res, next) {
MongoClient.connect('mongodb://127.0.0.1:27017/sandglass', function(err, db) {
db.open(function(err,db){
var profiles=req.params.id
db.collection("logs").find({'name':profiles},{'name':true}).toArray(function(err, results) {
res.send({"datas":results});
});
});
});
});
module.exports = router;
<file_sep>/routes/admin.js
var express = require('express');
var router = express.Router();
var httpreq = require('httpreq');
var services=require('./services');
var MongoClient = require('mongodb').MongoClient;
var ObjectID = require('mongodb').ObjectID;
var util = require('util');
router.get('/', function(req, res, next) {
MongoClient.connect("mongodb://localhost:27017/sandglass", function(err, db) {
db.open(function(err,db){
db.collection('logs', function(err, collection) {
collection.find().toArray(function(err, items) {
res.render('admin', {data:services.getcomponents(), profiles:items, user:services.getuser()});
});
});
});
});
});
router.post('/createprofile', function(req, res, next) {
MongoClient.connect("mongodb://localhost:27017/sandglass", function(err, db) {
if (err) throw err;
var profile=JSON.parse(req.body.datas);
var profilename=profile.profile.name;
var profiletype="profiles";
var params=profile.profile;
var desc=profile.desc;
var documents=profile.documents;
var created=services.getdate();
var createdby=services.getuser();
var document ={"name":profilename,"params":params,"type":"profiles","documents":documents,"created_at":created,"created_by":createdby};
db.collection('logs').insert(document, function(err, records) {
console.log(document);
});
});
res.end(JSON.stringify(req.body.datas));
});
router.get('/stopserve', function(req, res, next) {
//var stopser=services.stopServ("feelserv","STAGE2C8998");
httpreq.get("http://psm.corp.ebay.com/stopserv/?servname=feelserv&stage=STAGE2C8998", function (err, data){
//output=JSON.parse(data.body);
console.log(err);
console.log(data);
res.end(data.body);
});
});
router.get('/viewprofile/:id', function(req, res, next) {
MongoClient.connect('mongodb://127.0.0.1:27017/sandglass', function(err, db) {
db.open(function(err,db){
db.collection('logs', function(err, collection) {
var o_id = new ObjectID(req.params.id);
collection.find({'_id':o_id}).toArray(function(err, items) {
res.render('blank', {data:items});
console.log(items);
});
});
});
});
});
router.get('/editprofile/:id', function(req, res, next) {
MongoClient.connect('mongodb://127.0.0.1:27017/sandglass', function(err, db) {
db.open(function(err,db){
db.collection('logs', function(err, collection) {
var o_id = new ObjectID(req.params.id);
collection.find({'_id':o_id}).toArray(function(err, items) {
res.render('editprofileid', {data:items,services:services.getcomponents()});
console.log(items);
});
});
});
});
});
router.post('/createprofilewithid', function(req, res, next) {
MongoClient.connect('mongodb://127.0.0.1:27017/sandglass', function(err, db) {
var profile=JSON.parse(req.body.datas);
var profilename=profile.profile.name;
var profiletype="profiles";
var params=profile.profile;
var desc=profile.desc;
var documents=profile.documents;
var created=services.getdate();
var createdby=services.getuser();
var document ={"name":profilename,"params":params,"type":"profiles","documents":documents,"created_at":created,"created_by":createdby};
db.open(function(err,db){
db.collection('logs', function(err, collection) {
//var o_id = new ObjectID(req.params.id);
collection.find({"name":profilename},{"_id":1},function(err, items) {
console.log(items);
//res.send(items);
});
//end collection.find
collection.remove({"name":profilename});
db.collection('logs').insert(document, function(err, records) {
console.log(document);
res.end();
});
});
});
});
});
router.get('/deleteprofile/:id', function(req, res, next) {
MongoClient.connect('mongodb://127.0.0.1:27017/sandglass', function(err, db) {
db.open(function(err,db){
db.collection('logs', function(err, collection) {
var o_id = new ObjectID(req.params.id);
collection.remove({'_id':o_id});
res.end();
});
});
});
});
module.exports = router;
| d72f3d24ef2e69412e6ee304cdc62630e0dd9beb | [
"Markdown",
"JavaScript"
] | 3 | Markdown | khizaear/payapp | 9103b03be18d41978e84073acafe64c4367519fd | 1912f5e9f755f58cece4e5ecdee450a89357f1b7 | |
refs/heads/master | <file_sep>#include <stdio.h>
#include <stdlib.h>
#include <signal.h>
#include <unistd.h>
#include <sys/wait.h>
#include <sys/types.h>
int main(int argc, char *argv[]) {
int status;
pid_t pid = fork();
/* Ignore signals from keyboard */
signal(SIGINT, SIG_IGN);
signal(SIGTSTP, SIG_IGN);
signal(SIGQUIT, SIG_IGN);
if (pid == 0) {
char ppid[12];
sprintf(ppid, "%d", getppid());
char *aargv[4] = {"alien-js", ppid, NULL};
execvp( "alien-js", aargv );
printf("Count not launch alien shell...\n");
exit(0);
} else {
while (wait(&status) != pid);
return status;
}
}
<file_sep>cmake_minimum_required(VERSION 3.10)
project(Alien VERSION 0.0.1)
add_executable(alien alien.c)
install(TARGETS alien RUNTIME DESTINATION bin)
<file_sep>const REPL = require("./repl");
const keys = require("@alien.sh/repl/keys");
const builtins = require("./builtins");
const eventEmitter = require("events");
const { generate } = require("./parser/index")({ scope: { builtins } });
const scope = {
builtins,
async pipe(...args) {
const procs = [];
for (const [index, arg] of Object.entries(args)) {
const i = Number(index);
const stdio =
i == args.length - 1 ? ["pipe", "inherit", "inherit"] : "pipe";
const proc = await arg.eval(scope, stdio);
procs[i] = proc;
if (i > 0) {
procs[i - 1].stdout.pipe(proc.stdin);
}
}
const result = await procs[procs.length - 1].results();
await Promise.all(procs);
console.log(result);
},
cd(path) {
try {
process.chdir(path);
} catch {
console.log(`Cannot cd to ${path}`);
}
},
exit() {
process.exit(0);
}
};
const SIGTSTP = 18;
const handle = async (core, line) => {
const atom = await core.generate(line);
const proc = await atom.eval(scope, "inherit");
if (proc && proc.results) {
return new Promise(async resolve => {
core.signals.on(SIGTSTP, function() {
console.log("meow");
resolve();
});
const results = await proc.results();
return resolve(results);
});
}
};
const start = core => {
const { stdin, stdout } = process;
core.repl = new core.REPL({
core,
stdin,
stdout,
prompts: { prompt: "\ue36e >" }
});
for (const plugin of core.plugins) {
plugin(core);
}
};
const signals = new eventEmitter();
const core = {
start,
handle,
scope,
generate,
REPL,
keys,
signals,
onBeforePrint: [],
onBeforeProcess: [],
onSig: []
};
module.exports = core;
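// A plugin is just a function that receives this core object. Rough sketch, assuming the
// launcher collects an array of such functions into core.plugins (e.g. from ~/.alien/plugins.js,
// the file install.sh writes) before start() runs:
//   module.exports = [core => {
//     core.onBeforeProcess.push(src => src.trim()); // hooks get the input line and return it
//   }];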
<file_sep># Alien
Alien Shell
## Installation
There are no installation instructions because this isn't ready for general use. I will provide instructions when the below issues are fixed.
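In the meantime the repository does ship an `install.sh`; below is a rough sketch of invoking it for development (flags taken from the script itself, and still subject to the issues below):
```sh
# needs node, npm, cmake, tar and one of curl/wget/fetch; default prefix is /usr/local
./install.sh --prefix=$HOME/.local   # pick the install location
./install.sh --no-plugins --no-root  # skip plugin setup / don't escalate with sudo or su
```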
## Known issues / TODO
- [ ] Ctrl+c, Ctrl+z, ... does not work
- [X] Tab completion is buggy
- [ ] No history
- [X] When in a process, keyboard input does not work
## Screenshots

Commands:

<file_sep>const patterns = {
string: /^"(?:[^"]|\\")*"/,
backtick: /^`/,
left_paren: /^[(]/,
right_paren: /^[)]/,
number: /^([+-]?(\d+(\.\d+)?)|(\.\d+))(?= |\n|$)/,
boolean: /^(true|false)(?=[ \n()]|$)/,
date: /^\d{4}-\d{2}-\d{2}(?=[ \n()]|$)/,
symbol: /^[^ \r\n\t()]+/i,
newline: /^\r?\n/,
space: /^ +/
};
module.exports = patterns;
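// Illustrative matches for the token patterns above (hypothetical inputs, not from a test suite):
//   patterns.number.test("3.14")      // true
//   patterns.date.test("2021-05-01")  // true
//   patterns.boolean.test("true")     // true
//   patterns.symbol.test("ls")        // true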
<file_sep>#!/usr/bin/env node
const fs = require("fs");
const path = require("path");
const { execSync } = require("child_process");
const buildDir = path.join(__dirname, "build");
fs.mkdirSync(buildDir, { recursive: true });
const prefix = process.argv[2];
execSync(`cmake -DCMAKE_INSTALL_PREFIX:PATH=${prefix} ..`, { cwd: buildDir });
execSync("cmake --build . --target install", { cwd: buildDir });
<file_sep>const repl = require("@alien.sh/repl");
const hasEqualParens = str =>
(str.match(/\(/g) || []).length == (str.match(/\)/g) || []).length;
class AlienRepl extends repl {
constructor({ core, ...rest }) {
super(rest);
this.core = core;
this.keyEaters["\r"] = [
function(key) {
if (this.isBusy) return;
if (this.currentInput && hasEqualParens(this.currentInput)) {
this.stdout.write("\n");
this.processInput();
} else {
this.insertAtCursor(key);
}
}
];
}
preprint() {
for (const hook of this.core.onBeforePrint) {
hook.call(this);
}
}
async processInput() {
let src = this.currentInput;
for (const hook of this.core.onBeforeProcess) {
src = hook.call(this, src);
}
this.y = 0;
this.x = 0;
this.currentInput = "";
this.currentOutput = "";
this.stdout.write("\n");
this.isBusy = true;
this.stdin.pause();
this.stdin.setRawMode(false);
await this.core.handle(this.core, src);
this.stdin.setRawMode(true);
this.stdin.resume();
this.isBusy = false;
this.stdout.write("\n");
this.preprint();
this.print();
}
}
module.exports = AlienRepl;
<file_sep>#!/usr/bin/env sh
version="0.0.11"
info() {
echo -e "\e[92m$1\e[0m"
}
error() {
echo -e "\e[91mError! $1\e[0m"
}
run() {
command="$@"
if [ -z ${noroot+x} ]; then
if [ -x "$(command -v sudo)" ]; then
sudo $command
else
su root -c "$command"
fi
else
$command
fi
}
checkdeps() {
if ! { [ -x "$(command -v fetch)" ] || [ -x "$(command -v wget)" ] || [ -x "$(command -v curl)" ]; }; then
error "This script needs one of: fetch, wget or curl"
exit 1
elif ! { [ -x "$(command -v npm)" ] && [ -x "$(command -v node)" ]; }; then
error "This script needs npm and node"
exit 1
elif ! [ -x "$(command -v cmake)" ]; then
error "This script needs cmake"
exit 1
elif ! [ -x "$(command -v tar)" ]; then
error "This script needs tar"
exit 1
elif ! [ -x "$(command -v whoami)" ]; then
error "This script needs whoami"
exit 1
fi
}
getvars() {
bundle="bundle.tar.gz"
dirname="alien*"
tarball="https://github.com/alien-sh/alien/archive/v$version.tar.gz"
if [ -d "./.git" ]; then
checkout="YES"
fi
}
getargs() {
while [ "$#" -gt 0 ]
do
case "$1" in
-p|--prefix)
shift
prefix="$1"
;;
-p=*|--prefix=*)
prefix="${1#*=}"
;;
-np|--no-plugins)
noplugins="YES"
;;
-nr|--no-root)
noroot="YES"
;;
*)
error "Unknown option '$key'"
exit 1
;;
esac
shift
done
}
checkargs() {
if [ -z ${prefix+x} ]; then
info "Prefix not set, setting to /usr/local"
prefix="/usr/local"
fi
}
download() {
if ! [ -z ${checkout+x} ]; then
info "Skipping download"
return 0
fi
info "Downloading: $tarball"
if [ -x "$(command -v fetch)" ]; then
fetch $tarball -o $bundle
elif [ -x "$(command -v wget)" ]; then
wget $tarball -O $bundle
elif [ -x "$(command -v curl)" ]; then
curl $tarball --output $bundle
fi
}
maketemp() {
if ! [ -z ${checkout+x} ]; then
info "Skipping temp dir creation"
return 0
fi
tmpdir=$(mktemp -d -t al-XXXXXXXXXX)
info "Created temp dir: $tmpdir"
cd $tmpdir
}
untar() {
if ! [ -z ${checkout+x} ]; then
return 0
fi
tar xzf $bundle
cd $dirname
}
install() {
if ! [ -z ${checkout+x} ]; then
info "Installing from checkout"
fi
info "Installing dependencies"
npm install
info "Installing in $prefix"
run "npm install --prefix $prefix -g ."
run "node build.js $prefix"
}
homesetup() {
cd ~
if ! [ -d ".alien" ]; then
mkdir ".alien"
fi
if ! [ -z ${noplugins+x} ]; then
info "Skipping plugin install"
return 0
fi
cd ".alien"
if ! [ -f "package.json" ]; then
configdir="$(whoami)-alien-config"
mkdir $configdir
cd $configdir
npm init -y
mv "package.json" ..
cd ..
rm -rf $configdir
fi
npm i -S "@alien.sh/core-plugins"
echo 'module.exports = [require("@alien.sh/core-plugins")];' > "plugins.js"
}
main() {
checkdeps
getvars
getargs $@
checkargs
maketemp
download
untar
install
homesetup
info "Install done."
}
main $@
| e6c0f88fbc58d5e0322a2d26000135e9de291c1e | [
"CMake",
"JavaScript",
"Markdown",
"C",
"Shell"
] | 8 | C | alien-sh/alien | 6e7cc4d7e169677b7cd43fe6f8f7bb3bb888c5eb | 6209b833775998cdfd92bf7a0ab2af2af1948130 | |
refs/heads/master | <file_sep>package com.yunjian.activity;
import java.util.List;
import com.umeng.analytics.MobclickAgent;
import com.yunjian.connection.HttpUtils.EHttpError;
import com.yunjian.fragment.SearchResultFragment;
import com.yunjian.service.BookService;
import com.yunjian.service.OnQueryCompleteListener;
import com.yunjian.service.QueryId;
import android.app.Activity;
import android.app.Fragment;
import android.app.FragmentManager;
import android.os.Bundle;
import android.text.Editable;
import android.text.TextWatcher;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.ArrayAdapter;
import android.widget.AutoCompleteTextView;
import android.widget.Button;
import android.widget.ImageButton;
import android.widget.ListView;
public class SearchActivity extends Activity implements OnClickListener,OnQueryCompleteListener{
private AutoCompleteTextView autoCompleteTextView;
private Button searchButton;
private ListView listView;
private List<String>booksList;
@Override
protected void onCreate(Bundle savedInstanceState) {
// TODO Auto-generated method stub
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_search);
initView();
}
public void initView(){
autoCompleteTextView = (AutoCompleteTextView) findViewById(R.id.search_et);
listView = (ListView) findViewById(R.id.search_list);
searchButton = (Button) findViewById(R.id.search_btn);
autoCompleteTextView.addTextChangedListener(textWatcher);
searchButton.setOnClickListener(this);
listView.setOnItemClickListener(new OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> arg0, View arg1, int arg2,
long arg3) {
// TODO Auto-generated method stub
listView.setVisibility(View.GONE);
FragmentManager fManager = getFragmentManager();
Fragment searchresultFragment = new SearchResultFragment();
((SearchResultFragment) searchresultFragment).setCountent(booksList.get(arg2));
fManager.beginTransaction().replace(R.id.search_result_ll,searchresultFragment).commit();
}
});
new BookService().getSimilarBook("Κι", SearchActivity.this);
}
private TextWatcher textWatcher = new TextWatcher() {
@Override
public void onTextChanged(CharSequence arg0, int arg1, int arg2, int arg3) {
// TODO Auto-generated method stub
String content = autoCompleteTextView.getText().toString();
try {
new BookService().getSimilarBook(content, SearchActivity.this);
} catch (Exception e) {
// TODO: handle exception
e.printStackTrace();
}
}
@Override
public void beforeTextChanged(CharSequence arg0, int arg1, int arg2,
int arg3) {
// TODO Auto-generated method stub
}
@Override
public void afterTextChanged(Editable arg0) {
// TODO Auto-generated method stub
}
};
@Override
public void onClick(View arg0) {
// TODO Auto-generated method stub
switch (arg0.getId()) {
case R.id.search_btn:
listView.setVisibility(View.GONE);
FragmentManager fManager = getFragmentManager();
Fragment searchresultFragment = new SearchResultFragment();
((SearchResultFragment) searchresultFragment).setCountent(autoCompleteTextView.getText().toString());
fManager.beginTransaction().replace(R.id.search_result_ll,searchresultFragment).commit();
break;
default:
break;
}
}
@Override
public void onQueryComplete(QueryId queryId, Object result, EHttpError error) {
// TODO Auto-generated method stub
if(queryId.equals(BookService.AUTOCOMPLETE)){
if(result != null){
booksList = (List<String>) result;
ArrayAdapter<String> adapter = new ArrayAdapter<String>(SearchActivity.this, android.R.layout.simple_list_item_1, booksList);
listView.setAdapter(adapter);
listView.setVisibility(View.VISIBLE);
}
}
}
@Override
protected void onPause() {
// TODO Auto-generated method stub
super.onPause();
MobclickAgent.onPause(this);
}
@Override
protected void onResume() {
// TODO Auto-generated method stub
super.onResume();
MobclickAgent.onResume(this);
}
}
<file_sep>package com.yunjian.activity;
import com.umeng.analytics.MobclickAgent;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.os.Bundle;
public class LoadingActivity extends Activity{
@Override
protected void onCreate(Bundle savedInstanceState) {
// TODO Auto-generated method stub
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_loading);
new Thread(new Runnable() {
@Override
public void run() {
// TODO Auto-generated method stub
try {
Thread.sleep(2000);
Intent intent = new Intent(LoadingActivity.this,MainActivity.class);
startActivity(intent);
finish();
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}).start();
}
public static boolean isNetworkAvailable(Activity activity)
{
Context context = activity.getApplicationContext();
// get the phone's connectivity manager (handles wi-fi, mobile data and other connections)
ConnectivityManager connectivityManager = (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
if (connectivityManager == null)
{
return false;
}
else
{
// get the NetworkInfo objects
NetworkInfo[] networkInfo = connectivityManager.getAllNetworkInfo();
if (networkInfo != null && networkInfo.length > 0)
{
for (int i = 0; i < networkInfo.length; i++)
{
// check whether this network is currently connected
if (networkInfo[i].getState() == NetworkInfo.State.CONNECTED)
{
return true;
}
}
}
}
return false;
}
@Override
protected void onPause() {
// TODO Auto-generated method stub
super.onPause();
MobclickAgent.onPause(this);
}
@Override
protected void onResume() {
// TODO Auto-generated method stub
super.onResume();
MobclickAgent.onResume(this);
}
}
<file_sep>package com.yunjian.adapter;
import java.util.List;
import java.util.Map;
import com.yunjian.activity.R;
import com.yunjian.image.ImageLoader;
import com.yunjian.util.Utils;
import com.yunjian.view.CircleImageView;
import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.ImageView;
import android.widget.TextView;
public class MessageCenterAdapter extends BaseAdapter{
private Context context;
private LayoutInflater layoutInflater;
private List<Map<String, Object>>list;
private ImageLoader mImageLoader;
public MessageCenterAdapter(Context context,List<Map<String, Object>>list){
this.context = context;
this.layoutInflater = LayoutInflater.from(context);
this.list = list;
mImageLoader = ImageLoader.getInstance(context);
}
@Override
public int getCount() {
// TODO Auto-generated method stub
return list.size();
}
@Override
public Object getItem(int arg0) {
// TODO Auto-generated method stub
return list.get(arg0);
}
@Override
public long getItemId(int arg0) {
// TODO Auto-generated method stub
return 0;
}
@SuppressWarnings("unused")
@Override
public View getView(int arg0, View arg1, ViewGroup arg2) {
// TODO Auto-generated method stub
Item item = null;
if(item == null){
item = new Item();
arg1 = layoutInflater.inflate(R.layout.message_item, null);
item.imageView = (CircleImageView)arg1.findViewById(R.id.message_icon);
item.messageType = (TextView)arg1.findViewById(R.id.message_type);
item.messageContent = (TextView)arg1.findViewById(R.id.message_content);
item.messageTime = (TextView)arg1.findViewById(R.id.message_time);
item.messageAfterType = (TextView)arg1.findViewById(R.id.message_aftertype);
String type = list.get(arg0).get("type").toString();
if(type.equals("0.0")){
item.messageType.setText("系统消息");
item.imageView.setBackgroundResource(R.drawable.message_system_icon);
item.messageContent.setText(list.get(arg0).get("content").toString());
}
else if(type.equals("1.0")){
item.messageAfterType.setText("评论了你卖的书籍");
item.messageType.setText(list.get(arg0).get("username").toString());
item.messageContent.setText(list.get(arg0).get("content").toString());
item.messageTime.setText(list.get(arg0).get("time").toString());
// load the user image
try {
mImageLoader.addTask(Utils.URL+list.get(arg0).get("user_id"), item.imageView);
} catch (Exception e) {
// TODO: handle exception
e.printStackTrace();
}
}
else if(type.equals("2.0")){
try {
item.messageAfterType.setText("评论了你的心愿书单");
item.messageType.setText(list.get(arg0).get("username").toString());
item.messageContent.setText(list.get(arg0).get("content").toString());
item.messageTime.setText(list.get(arg0).get("time").toString());
} catch (Exception e) {
// TODO: handle exception
}
// load the user image
try {
mImageLoader.addTask(Utils.URL+list.get(arg0).get("user_id"), item.imageView);
} catch (Exception e) {
// TODO: handle exception
e.printStackTrace();
}
}
else if(type.equals("3.0")){
item.messageType.setText("心愿单通知");
item.imageView.setBackgroundResource(R.drawable.message_wish_icon);
item.messageContent.setText("你的心愿单被"+list.get(arg0).get("username").toString()+"接下啦");
item.messageTime.setText(list.get(arg0).get("time").toString());
}
arg1.setTag(item);
}
else{
item=(Item)arg1.getTag();
}
return arg1;
}
private class Item{
private CircleImageView imageView;
private TextView messageType;
private TextView messageContent;
private TextView messageTime;
private TextView messageAfterType;
}
}
<file_sep>package com.yunjian.service;
import java.io.FileOutputStream;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.apache.http.message.BasicNameValuePair;
import android.app.Activity;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.SharedPreferences.Editor;
import android.os.IBinder;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import com.yunjian.connection.HttpUtils;
import com.yunjian.connection.HttpUtils.EHttpError;
import com.yunjian.util.Utils;
public class BookService extends Service{
private final String ADDBOOKACTION = "book/setBookInfo";
private final String GETBOOKSBYTYPEACTION = "book/getBooksByType";
private final String GETBOOKBYNAMEACTION = "book/getBooksByName";
private final String SEARCHBOOKACTION = "book/searchBook";
private final String GETSIMILARBOOKNAME = "book/getSimilarBookname";
private final String CLICKBOOKACTION = "book/bookClicked";
public final static QueryId GETBOOKBYNAME = new QueryId();
public static final QueryId GETBOOKBYTAPE = new QueryId();
public final static QueryId CLICKWISH = new QueryId();
public static final QueryId AUTOCOMPLETE = new QueryId();
/*
* @function publish a second-hand book listing
* @param
* @return
*/
public void addBook(Map<String, Object>map,OnQueryCompleteListener onQueryCompleteListener){
List<BasicNameValuePair>parms = new ArrayList<BasicNameValuePair>();
parms.add(new BasicNameValuePair("user_id", map.get("user_id").toString()));
parms.add(new BasicNameValuePair("bookname", map.get("bookname").toString()));
parms.add(new BasicNameValuePair("book_id", map.get("book_id").toString()));
parms.add(new BasicNameValuePair("username", map.get("username").toString()));
parms.add(new BasicNameValuePair("price", map.get("bookprice").toString()));
parms.add(new BasicNameValuePair("type", map.get("type").toString()));
parms.add(new BasicNameValuePair("newness", map.get("newness").toString()));
parms.add(new BasicNameValuePair("audience", map.get("audience").toString()));
parms.add(new BasicNameValuePair("description", map.get("description").toString()));
parms.add(new BasicNameValuePair("mobile", map.get("mobile").toString()));
parms.add(new BasicNameValuePair("qq", map.get("qq").toString()));
parms.add(new BasicNameValuePair("weixin", map.get("wexin").toString()));
parms.add(new BasicNameValuePair("img1", map.get("img1").toString()));
parms.add(new BasicNameValuePair("img2", map.get("img2").toString()));
parms.add(new BasicNameValuePair("img3", map.get("img3").toString()));
System.out.println("addbook");
HttpUtils.makeAsyncPost(ADDBOOKACTION, parms,
new QueryCompleteHandler(onQueryCompleteListener, new QueryId()) {
@Override
public void handleResponse(String jsonResult, EHttpError error) {
// TODO Auto-generated method stub
if(jsonResult!=null&&error == EHttpError.KErrorNone){
System.out.println("发布书单"+jsonResult);
this.completeListener.onQueryComplete(new QueryId(), jsonResult, error);
}
else {
this.completeListener.onQueryComplete(new QueryId(), null, error);
}
}
});
}
/*
* @function get second-hand books by type
* @param
* @return
*/
public void getBooksByType(String type,String order_by,String page,OnQueryCompleteListener onQueryCompleteListener,final Context context){
List<BasicNameValuePair>parms = new ArrayList<BasicNameValuePair>();
parms.add(new BasicNameValuePair("type", type));
parms.add(new BasicNameValuePair("order_by", order_by));
parms.add(new BasicNameValuePair("page", page));
parms.add(new BasicNameValuePair("pagesize","18"));
parms.add(new BasicNameValuePair("user_id",Utils.user_id));
HttpUtils.makeAsyncPost(GETBOOKSBYTYPEACTION, parms,
new QueryCompleteHandler(onQueryCompleteListener, GETBOOKBYTAPE) {
Map<String, Object>books;
@Override
public void handleResponse(String jsonResult, EHttpError error) {
// TODO Auto-generated method stub
if(jsonResult!=null&&error == EHttpError.KErrorNone){
Gson gson = new Gson();
Type type = new TypeToken<Map<String, Object>>() {
}.getType();
books = gson.fromJson(jsonResult, type);
List<Map<String, Object>>list = (List<Map<String, Object>>) books.get("books");
saveCache(context, jsonResult);
System.out.println("旧书"+list);
this.completeListener.onQueryComplete(GETBOOKBYTAPE, books, error);
}
else {
this.completeListener.onQueryComplete(GETBOOKBYTAPE, null, error);
}
}
});
}
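// Note: getBooksByType above expects the server to answer with JSON of the form
// {"books":[{...}, ...]}; the raw response is cached locally via saveCache() below.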
public void saveCache(Context context,String filecontent){
FileOutputStream out = null;
try {
out = context.openFileOutput("booklist", Context.MODE_PRIVATE);
out.write(filecontent.getBytes("UTF-8"));
System.out.println("写入成功");
} catch (Exception e) {
e.printStackTrace();
}
finally{
try {
out.close();
} catch (Exception e) {
e.printStackTrace();
}
}
}
/*
* @function get book details by name
* @param
* @return
*/
public void getBooksByName(String bookname,OnQueryCompleteListener onQueryCompleteListener){
List<BasicNameValuePair>parms = new ArrayList<BasicNameValuePair>();
parms.add(new BasicNameValuePair("bookname", bookname));
HttpUtils.makeAsyncPost(GETBOOKBYNAMEACTION, parms,
new QueryCompleteHandler(onQueryCompleteListener, GETBOOKBYNAME) {
Map<String, Object>books;
@Override
public void handleResponse(String jsonResult, EHttpError error) {
// TODO Auto-generated method stub
if(jsonResult!=null&&error == EHttpError.KErrorNone){
Gson gson = new Gson();
Type type = new TypeToken<Map<String, Object>>() {
}.getType();
books = gson.fromJson(jsonResult, type);
List<Map<String, Object>>list = (List<Map<String, Object>>) books.get("books");
System.out.println("书籍详情"+list);
this.completeListener.onQueryComplete(GETBOOKBYNAME, list, error);
}
else {
this.completeListener.onQueryComplete(GETBOOKBYNAME, null, error);
}
}
});
}
/*
* @Function search second-hand books
*/
public void searchBook(String bookname,String type,String page,OnQueryCompleteListener onQueryCompleteListener){
List<BasicNameValuePair>parms = new ArrayList<BasicNameValuePair>();
parms.add(new BasicNameValuePair("keyword", bookname));
parms.add(new BasicNameValuePair("page", page));
parms.add(new BasicNameValuePair("pagesize", "6"));
parms.add(new BasicNameValuePair("type", type));
HttpUtils.makeAsyncPost(SEARCHBOOKACTION, parms,
new QueryCompleteHandler(onQueryCompleteListener, new QueryId()) {
Map<String, Object>books;
@Override
public void handleResponse(String jsonResult, EHttpError error) {
// TODO Auto-generated method stub
if(jsonResult!=null&&error == EHttpError.KErrorNone){
Gson gson = new Gson();
Type type = new TypeToken<Map<String, Object>>() {
}.getType();
books = gson.fromJson(jsonResult, type);
List<Map<String, Object>>list = (List<Map<String, Object>>) books.get("books");
System.out.println("搜索到的书"+list);
this.completeListener.onQueryComplete(new QueryId(), list, error);
}
else {
this.completeListener.onQueryComplete(new QueryId(), null, error);
}
}
});
}
/*
* similar book names (autocomplete suggestions)
*/
public void getSimilarBook(String bookname,OnQueryCompleteListener onQueryCompleteListener){
List<BasicNameValuePair>parms = new ArrayList<BasicNameValuePair>();
parms.add(new BasicNameValuePair("bookname", bookname));
parms.add(new BasicNameValuePair("limit", 10+""));
HttpUtils.makeAsyncPost(GETSIMILARBOOKNAME, parms,
new QueryCompleteHandler(onQueryCompleteListener, AUTOCOMPLETE) {
@Override
public void handleResponse(String jsonResult, EHttpError error) {
// TODO Auto-generated method stub
Map<String, Object>books;
if(jsonResult!=null&&error == EHttpError.KErrorNone){
Gson gson = new Gson();
Type type = new TypeToken<Map<String, Object>>() {
}.getType();
books = gson.fromJson(jsonResult, type);
List<String>list = (List<String>) books.get("booknames");
System.out.println("搜索到的书"+list);
this.completeListener.onQueryComplete(AUTOCOMPLETE, list, error);
}
else {
this.completeListener.onQueryComplete(AUTOCOMPLETE, null, error);
}
}
});
}
/*
* record that a book was clicked
*/
public void clickListener(String bookid,OnQueryCompleteListener onQueryCompleteListener){
List<BasicNameValuePair>parms = new ArrayList<BasicNameValuePair>();
parms.add(new BasicNameValuePair("book_id", bookid));
HttpUtils.makeAsyncPost(CLICKBOOKACTION, parms,
new QueryCompleteHandler(onQueryCompleteListener, CLICKWISH) {
@Override
public void handleResponse(String jsonResult, EHttpError error) {
// TODO Auto-generated method stub
if(jsonResult!=null&&error == EHttpError.KErrorNone){
this.completeListener.onQueryComplete(CLICKWISH, jsonResult, error);
}
else {
this.completeListener.onQueryComplete(CLICKWISH, jsonResult, error);
}
}
});
}
@Override
public IBinder onBind(Intent arg0) {
// TODO Auto-generated method stub
return null;
}
}
<file_sep># OldBookMarket
This is the project of 校园淘书
<file_sep>package com.yunjian.fragment;
import java.io.ByteArrayOutputStream;
import java.io.FileInputStream;
import java.lang.reflect.Type;
import java.util.List;
import java.util.Map;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import com.umeng.analytics.MobclickAgent;
import com.yunjian.activity.AddWishActivity;
import com.yunjian.activity.LoadingActivity;
import com.yunjian.activity.LoginActivity;
import com.yunjian.activity.R;
import com.yunjian.activity.WishDetailActivity;
import com.yunjian.adapter.BookAdapter;
import com.yunjian.adapter.WishAdapter;
import com.yunjian.connection.HttpUtils.EHttpError;
import com.yunjian.service.OnQueryCompleteListener;
import com.yunjian.service.QueryId;
import com.yunjian.service.WishService;
import com.yunjian.util.Utils;
import com.yunjian.view.LoadingDialog;
import com.yunjian.view.PullToRefreshView;
import com.yunjian.view.PullToRefreshView.OnFooterRefreshListener;
import com.yunjian.view.PullToRefreshView.OnHeaderRefreshListener;
import android.app.Fragment;
import android.content.Intent;
import android.graphics.Color;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.View.OnClickListener;
import android.widget.AdapterView;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ListView;
import android.widget.RadioButton;
import android.widget.RadioGroup;
import android.widget.Toast;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.RadioGroup.OnCheckedChangeListener;
public class WishFragment extends Fragment implements OnHeaderRefreshListener,
OnFooterRefreshListener, OnClickListener{
private PullToRefreshView mPullToRefreshView;
private ListView listView;
private LinearLayout productButton;
private Button allbook,coursebook,english,japanese,technology,master,entertain;
private WishAdapter adapter;
private List<Map<String, Object>> list;
private int page = 1;
private String order_by = "clicks";
private int type = 0;
private WishService service;
private OnQueryCompleteListener onQueryCompleteListener;
private LoadingDialog loadingDialog;
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
// TODO Auto-generated method stub
View view = inflater.inflate(R.layout.wish_book, null);
initView(view);
return view;
}
@Override
public void onResume() {
// TODO Auto-generated method stub
super.onResume();
initService();
}
@Override
public void onPause() {
super.onPause();
}
public void initService(){
service = new WishService();
onQueryCompleteListener = new OnQueryCompleteListener() {
@Override
public void onQueryComplete(QueryId queryId, Object result,
EHttpError error) {
// TODO Auto-generated method stub
loadingDialog.dismiss();
if(WishService.LISTWISH.equals(queryId)){
if (result != null) {
if (page == 1) {
list = (List<Map<String, Object>>) result;
try {
adapter = new WishAdapter(getActivity(), list);
} catch (Exception e) {
// TODO: handle exception
}
listView.setAdapter(adapter);
} else {
List<Map<String, Object>> temp = (List<Map<String, Object>>) result;
for (int i = 0; i < temp.size(); i++) {
list.add(temp.get(i));
}
adapter.notifyDataSetChanged();
}
} else {
Toast.makeText(getActivity(), "网络连接超时", 2000).show();
loadingDialog.dismiss();
}
}
else if(queryId.equals(WishService.CLICKWISH)){
System.out.println("click"+result);
}
}
};
getCache();
// start the background service
if(LoadingActivity.isNetworkAvailable(getActivity())){
resetService();
}
else {
Toast.makeText(getActivity(), "请检查你的网络", Toast.LENGTH_SHORT).show();
}
}
public void resetService() {
service.getWishes(type,String.valueOf(page), order_by,
onQueryCompleteListener,getActivity());
loadingDialog.show();
}
public void getCache(){
String filename = "wishlist"; //获得读取的文件的名称
FileInputStream in = null;
ByteArrayOutputStream bout = null;
byte[]buf = new byte[1024];
bout = new ByteArrayOutputStream();
int length = 0;
try {
in = getActivity().openFileInput(filename); // open the input stream
while((length=in.read(buf))!=-1){
bout.write(buf,0,length);
}
byte[] content = bout.toByteArray();
Gson gson = new Gson();
Type type = new TypeToken<Map<String, Object>>() {
}.getType();
Map<String, Object>wishes = gson.fromJson(new String(content,"UTF-8"), type);
list = (List<Map<String, Object>>) wishes.get("wishes");
adapter = new WishAdapter(getActivity(), list);
listView.setAdapter(adapter);
} catch (Exception e) {
e.printStackTrace();
}
try{
in.close();
bout.close();
}
catch(Exception e){}
}
public void resetButtonColor(){
allbook.setTextColor(Color.BLACK);
coursebook.setTextColor(Color.BLACK);
english.setTextColor(Color.BLACK);
japanese.setTextColor(Color.BLACK);
technology.setTextColor(Color.BLACK);
master.setTextColor(Color.BLACK);
entertain.setTextColor(Color.BLACK);
coursebook.setBackgroundResource(R.drawable.white);
allbook.setBackgroundResource(R.drawable.white);
english.setBackgroundResource(R.drawable.white);
japanese.setBackgroundResource(R.drawable.white);
technology.setBackgroundResource(R.drawable.white);
master.setBackgroundResource(R.drawable.white);
entertain.setBackgroundResource(R.drawable.white);
}
public void initView(View view)
{
mPullToRefreshView = (PullToRefreshView) view.findViewById(R.id.main_pull_refresh_view);
listView = (ListView) view.findViewById(R.id.wish_book_list);
productButton = (LinearLayout) view.findViewById(R.id.wish_product_btn);
allbook = (Button)view.findViewById(R.id.all);
coursebook = (Button)view.findViewById(R.id.coursebook);
english = (Button)view.findViewById(R.id.english);
japanese = (Button)view.findViewById(R.id.japanese);
technology = (Button)view.findViewById(R.id.technology);
master = (Button)view.findViewById(R.id.master);
entertain = (Button)view.findViewById(R.id.entertain);
productButton.setClickable(true);
listView.setOnItemClickListener(new OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> arg0, View arg1, int arg2,
long arg3) {
// TODO Auto-generated method stub
Intent intent = new Intent(getActivity(),
WishDetailActivity.class);
intent.putExtra("wish_id", list.get(arg2).get("wish_id")
.toString());
startActivity(intent);
service.clickListener(list.get(arg2).get("wish_id").toString(), onQueryCompleteListener);
}
});
mPullToRefreshView.setOnHeaderRefreshListener(this);
mPullToRefreshView.setOnFooterRefreshListener(this);
productButton.setOnClickListener(this);
allbook.setOnClickListener(this);
coursebook.setOnClickListener(this);
english.setOnClickListener(this);
japanese.setOnClickListener(this);
technology.setOnClickListener(this);
master.setOnClickListener(this);
entertain.setOnClickListener(this);
loadingDialog = new LoadingDialog(getActivity());
}
@Override
public void onClick(View arg0) {
// TODO Auto-generated method stub
switch (arg0.getId()) {
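		// Each category button below sets the wish type (0-6), resets paging to the first page,
		// re-queries the service and highlights the selected tab.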
case R.id.wish_product_btn:
if(Utils.user_id.equals("")){
Intent intent3 = new Intent(getActivity(),LoginActivity.class);
startActivity(intent3);
}
else {
Intent intent = new Intent(getActivity(), AddWishActivity.class);
startActivity(intent);
}
break;
case R.id.all:
type = 0;
page = 1;
resetService();
resetButtonColor();
loadingDialog.show();
allbook.setTextColor(this.getResources().getColor(R.color.seagreen));
allbook.setBackgroundResource(R.drawable.wish_type_bg);
break;
case R.id.coursebook:
type = 1;
page = 1;
resetService();
resetButtonColor();
loadingDialog.show();
coursebook.setTextColor(this.getResources().getColor(R.color.seagreen));
coursebook.setBackgroundResource(R.drawable.wish_type_bg);
break;
case R.id.english:
type = 2;
page = 1;
resetService();
resetButtonColor();
loadingDialog.show();
english.setTextColor(this.getResources().getColor(R.color.seagreen));
english.setBackgroundResource(R.drawable.wish_type_bg);
break;
case R.id.japanese:
type = 3;
page = 1;
resetService();
resetButtonColor();
loadingDialog.show();
japanese.setTextColor(this.getResources().getColor(R.color.seagreen));
japanese.setBackgroundResource(R.drawable.wish_type_bg);
break;
case R.id.technology:
type = 4;
page = 1;
resetService();
resetButtonColor();
loadingDialog.show();
technology.setTextColor(this.getResources().getColor(R.color.seagreen));
technology.setBackgroundResource(R.drawable.wish_type_bg);
break;
case R.id.master:
type = 5;
page = 1;
resetService();
resetButtonColor();
loadingDialog.show();
master.setTextColor(this.getResources().getColor(R.color.seagreen));
master.setBackgroundResource(R.drawable.wish_type_bg);
break;
case R.id.entertain:
type = 6;
page = 1;
resetService();
resetButtonColor();
loadingDialog.show();
entertain.setTextColor(this.getResources().getColor(R.color.seagreen));
entertain.setBackgroundResource(R.drawable.wish_type_bg);
break;
default:
break;
}
}
@Override
public void onFooterRefresh(PullToRefreshView view) {
// TODO Auto-generated method stub
mPullToRefreshView.postDelayed(new Runnable() {
@Override
public void run() {
page++;
System.out.println("页数" + page);
service.getWishes(type,String.valueOf(page), order_by,
onQueryCompleteListener,getActivity());
mPullToRefreshView.onFooterRefreshComplete();
}
}, 1000);
}
@Override
public void onHeaderRefresh(PullToRefreshView view) {
// TODO Auto-generated method stub
mPullToRefreshView.postDelayed(new Runnable() {
@Override
public void run() {
				// Set the last-updated time
				// mPullToRefreshView.onHeaderRefreshComplete("Last updated: 01-23 12:01");
list.clear();
page = 1;
service.getWishes(type,String.valueOf(page), order_by,
onQueryCompleteListener,getActivity());
mPullToRefreshView.onHeaderRefreshComplete();
}
}, 1000);
}
}
<file_sep>package com.yunjian.adapter;
import java.util.List;
import java.util.Map;
import com.yunjian.activity.AddWishActivity;
import com.yunjian.activity.R;
import com.yunjian.connection.HttpUtils.EHttpError;
import com.yunjian.fragment.MyWishesFragment;
import com.yunjian.image.ImageLoader;
import com.yunjian.service.OnQueryCompleteListener;
import com.yunjian.service.QueryId;
import com.yunjian.service.UserCenterService;
import com.yunjian.util.Utils;
import com.yunjian.util.SerializableMap;
import android.app.AlertDialog;
import android.app.Fragment;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
public class MyWishesAdapter extends BaseAdapter{
private Context context;
private LayoutInflater layoutInflater;
private List<Map<String, Object>>list;
private ImageLoader imageLoader;
private UserCenterService service;
private OnQueryCompleteListener onQueryCompleteListener;
private Fragment mywishesfraFragment;
public MyWishesAdapter(Context context,List<Map<String, Object>>list,Fragment mywishes){
this.context = context;
layoutInflater = LayoutInflater.from(context);
this.list = list;
this.mywishesfraFragment = mywishes;
imageLoader = ImageLoader.getInstance(context);
}
@Override
public int getCount() {
// TODO Auto-generated method stub
return list.size();
}
@Override
public Object getItem(int arg0) {
// TODO Auto-generated method stub
return list.get(arg0);
}
@Override
public long getItemId(int arg0) {
// TODO Auto-generated method stub
return arg0;
}
@Override
public View getView(int arg0, View arg1, ViewGroup arg2) {
// TODO Auto-generated method stub
Item item = null;
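		// Note: 'item' is always null at this point, so the convertView branch below is never taken;
		// a fresh row layout is inflated (and bound) on every call instead of being recycled.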
if(item == null){
item = new Item();
arg1 = layoutInflater.inflate(R.layout.wisheslist_item,null);
item.imageView = (ImageView)arg1.findViewById(R.id.wishphoto);
item.wishname = (TextView)arg1.findViewById(R.id.wishname);
item.wishstatus = (TextView)arg1.findViewById(R.id.wishstaus);
item.editButton = (Button)arg1.findViewById(R.id.edit_btn);
item.achieveButton = (Button)arg1.findViewById(R.id.achieve_btn);
item.wishname.setText(list.get(arg0).get("bookname").toString());
String status = list.get(arg0).get("status").toString();
if(status.equals("2.0")){
item.wishstatus.setText("心愿单有人接下啦");
item.editButton.setVisibility(View.GONE);
}
else if(status.equals("1.0")){
item.wishstatus.setText("已实现");
item.achieveButton.setVisibility(View.GONE);
item.editButton.setVisibility(View.GONE);
}
else {
item.wishstatus.setText("心愿单正常显示着呢");
}
arg1.setTag(item);
}
else {
item = (Item)arg1.getTag();
}
		// Load the wish image
try {
int length = list.get(arg0).get("imgs").toString().length();
if(length>10){
imageLoader.addTask(Utils.IMGURL+list.get(arg0).get("imgs").toString().substring(1,37), item.imageView);
}
} catch (Exception e) {
// TODO: handle exception
}
onQueryCompleteListener = new OnQueryCompleteListener() {
@Override
public void onQueryComplete(QueryId queryId, Object result, EHttpError error) {
// TODO Auto-generated method stub
if(result.equals("success")){
Toast.makeText(context, "心愿已实现", 2000).show();
((MyWishesFragment) mywishesfraFragment).resetService();
}
else {
Toast.makeText(context, "实现失败", 2000).show();
}
}
};
ItemClickListener itemClickListener = new ItemClickListener(arg0);
		// Listener for the edit button
		item.editButton.setOnClickListener(itemClickListener);
		// Listener for the achieve (mark as fulfilled) button
		item.achieveButton.setOnClickListener(itemClickListener);
return arg1;
}
public class ItemClickListener implements OnClickListener{
private int position;
public ItemClickListener(int position){
this.position = position;
}
@Override
public void onClick(View arg0) {
// TODO Auto-generated method stub
if(arg0.getId()==R.id.edit_btn){
Intent intent = new Intent(context,AddWishActivity.class);
Bundle bundle = new Bundle();
Map<String,Object> data=list.get(position);
SerializableMap tmpmap=new SerializableMap();
tmpmap.setMap(data);
bundle.putSerializable("wishinfo", tmpmap);
intent.putExtras(bundle);
Utils.IFEDITWISH = 1;
context.startActivity(intent);
}
else if (arg0.getId()==R.id.achieve_btn) {
new AlertDialog.Builder(context)
.setTitle("确认售出")
.setMessage("书籍出售后将不再显示,其他人也看不到此商品,是否确认售出")
.setPositiveButton("是",
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface arg0, int arg1) {
// TODO Auto-generated method stub
service = new UserCenterService();
service.setWishStatus(Utils.user_id, Utils.username,list.get(position).get("wish_id").toString(), 1, onQueryCompleteListener);
}
})
.setNegativeButton("否", null)
.show();
}
}
}
private class Item{
private ImageView imageView;
private TextView wishname;
private TextView wishstatus;
private Button editButton;
private Button achieveButton;
}
}
<file_sep>package com.yunjian.adapter;
import java.util.List;
import java.util.Map;
import com.yunjian.activity.LoginActivity;
import com.yunjian.activity.R;
import com.yunjian.connection.HttpUtils.EHttpError;
import com.yunjian.image.ImageLoader;
import com.yunjian.service.OnQueryCompleteListener;
import com.yunjian.service.QueryId;
import com.yunjian.service.UserCenterService;
import com.yunjian.util.Utils;
import com.yunjian.view.CircleImageView;
import com.yunjian.view.HelpAchievePop;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
public class WishAdapter extends BaseAdapter implements OnQueryCompleteListener{
private LayoutInflater layoutInflater;
private Context context;
private List<Map<String, Object>>list;
private ImageLoader mImageLoader;
public WishAdapter(Context context,List<Map<String, Object>>list){
this.context = context;
this.list = list;
this.layoutInflater = LayoutInflater.from(context);
mImageLoader = ImageLoader.getInstance(context);
}
@Override
public int getCount() {
// TODO Auto-generated method stub
return list.size();
}
@Override
public Object getItem(int arg0) {
// TODO Auto-generated method stub
return list.get(arg0);
}
@Override
public long getItemId(int arg0) {
// TODO Auto-generated method stub
return 0;
}
@SuppressWarnings("unused")
@Override
public View getView(int arg0, View arg1, ViewGroup arg2) {
// TODO Auto-generated method stub
Item item = null;
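		// Note: 'item' is always null here, so rows are re-inflated on every call and the data binding
		// only happens inside this branch; the getTag() recycling path below is effectively unused.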
if (item == null) {
item = new Item();
arg1 = layoutInflater.inflate(R.layout.list_item, null);
item.userImage = (CircleImageView)arg1.findViewById(R.id.user_image);
item.userName = (TextView)arg1.findViewById(R.id.user_name);
item.userSex = (ImageView)arg1.findViewById(R.id.user_sex);
item.achieve = (Button)arg1.findViewById(R.id.achieve);
item.bookName = (TextView)arg1.findViewById(R.id.book_name);
item.wishContent = (TextView)arg1.findViewById(R.id.wish_content);
			// Load the user's avatar image
try {
mImageLoader.addTask(Utils.URL+list.get(arg0).get("user_id"), item.userImage);
} catch (Exception e) {
// TODO: handle exception
e.printStackTrace();
}
item.userName.setText(list.get(arg0).get("username").toString());
item.bookName.setText(list.get(arg0).get("bookname").toString());
item.wishContent.setText(list.get(arg0).get("description").toString());
if(list.get(arg0).get("gender").toString().equals("0.0")){
item.userSex.setImageResource(R.drawable.user_sex_woman);
}
else if(list.get(arg0).get("gender").toString().equals("2.0")){
item.userSex.setImageResource(R.drawable.user_sex_secret);
}
arg1.setTag(item);
}
else {
item = (Item)arg1.getTag();
}
		// Load the book image (the call below is currently disabled)
try {
//mImageLoader.addTask(Utils.IMGURL+list.get(arg0).get("img"), item.imageView);
} catch (Exception e) {
// TODO: handle exception
e.printStackTrace();
}
ItemClickListener itemClickListener = new ItemClickListener(arg0);
item.achieve.setOnClickListener(itemClickListener);
return arg1;
}
public class ItemClickListener implements OnClickListener{
private int pos;
public ItemClickListener(int position){
this.pos = position;
}
@Override
public void onClick(View arg0) {
// TODO Auto-generated method stub
if(arg0.getId() == R.id.achieve){
if(list.get(pos).get("user_id").toString().equals(Utils.user_id)){
Toast.makeText(context, "这是你自己的心愿单喔", 2000).show();
}
else if(Utils.user_id.equals("")){
Intent intent3 = new Intent(context,LoginActivity.class);
context.startActivity(intent3);
}
else {
HelpAchievePop helpAchievePop = new HelpAchievePop(context,list.get(pos));
helpAchievePop.showAtLocation(((Activity) context)
.findViewById(R.id.wishmain), Gravity.BOTTOM|Gravity.CENTER_HORIZONTAL, 0, 0);
}
}
}
}
private class Item{
private CircleImageView userImage;
private TextView userName;
private ImageView userSex;
private Button achieve;
private TextView bookName;
private TextView wishContent;
}
@Override
public void onQueryComplete(QueryId queryId, Object result, EHttpError error) {
// TODO Auto-generated method stub
if(result.equals("success")){
Toast.makeText(context, "已经接下心愿单", 2000).show();
}
}
}
<file_sep>remoteHost = http://172.16.31.10:5000/
<file_sep>package com.yunjian.activity;
import java.io.File;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.Map;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.drawable.BitmapDrawable;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.view.Gravity;
import android.view.MotionEvent;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.Window;
import android.view.inputmethod.InputMethodManager;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
import android.widget.TextView;
import android.widget.Toast;
import com.umeng.analytics.MobclickAgent;
import com.yunjian.adapter.BookDetailCommentAdapter;
import com.yunjian.connection.HttpUtils.EHttpError;
import com.yunjian.image.AsyncImageLoader;
import com.yunjian.image.ImageLoader;
import com.yunjian.service.BookService;
import com.yunjian.service.OnQueryCompleteListener;
import com.yunjian.service.QueryId;
import com.yunjian.service.UserManageService;
import com.yunjian.service.WishService;
import com.yunjian.util.ScreenShot;
import com.yunjian.util.Utils;
import com.yunjian.view.CircleImageView;
import com.yunjian.view.ConnectSellerPopwindow;
import com.yunjian.view.GestureListener;
import com.yunjian.view.InputPopwindow;
import com.yunjian.view.MyScrollView;
import com.yunjian.view.NoScrollListView;
import com.yunjian.view.ScrollListener;
public class BookDetailActivity extends Activity implements OnClickListener {
private TextView title, readTime,publishDays,price,
userName,userTel,userQQ,userWinxin,basicCondition,suitCrowd,
myEvaluation,showAll,emptytTextView;
private CircleImageView userImage;
private LinearLayout llUserQQ,llUserWeChat;
private RelativeLayout header,nextLayout;
private ImageButton back;
private ImageView bookDetailImage1,bookDetailImage2,bookDetailImage3;
private Button nextSeller,frontSeller;
private NoScrollListView comment;
private ImageView bottomComment,bottomConnect,bottomShare,usersex;
private Boolean showAllFlag=true;
private List<Map<String, Object>> list;
private List<Map<String, Object>> commentlist;
private Map<String, Object>map;
private int curPage=0,maxPage=0;
private LinearLayout bottomLayout;
private OnQueryCompleteListener onQueryCompleteListener;
private ImageLoader imageLoader;
private BookDetailCommentAdapter bookDetailCommentAdapter;
private BookService service;
private String bookname,bookid;
private int image_number = 0;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
setContentView(R.layout.book_detail);
imageLoader = ImageLoader.getInstance(this);
initView();
onQueryCompleteListener = new OnQueryCompleteListener() {
@Override
public void onQueryComplete(QueryId queryId, Object result, EHttpError error) {
// TODO Auto-generated method stub
if(result!=null){
if(queryId.equals(BookService.GETBOOKBYNAME)){
list = (List<Map<String, Object>>) result;
getInfomation();
}
else if(queryId.equals(WishService.GETCOMMENT)){
commentlist = (List<Map<String, Object>>) result;
if(commentlist.size() == 0){
emptytTextView.setVisibility(View.VISIBLE);
bookDetailCommentAdapter = new BookDetailCommentAdapter(BookDetailActivity.this,commentlist);
comment.setAdapter(bookDetailCommentAdapter);
}
else {
emptytTextView.setVisibility(View.GONE);
bookDetailCommentAdapter = new BookDetailCommentAdapter(BookDetailActivity.this,commentlist);
comment.setAdapter(bookDetailCommentAdapter);
}
}
else if(queryId.equals(WishService.MAKECOMMENT)){
if(result.equals("success")){
Toast.makeText(BookDetailActivity.this, "评论成功", 2000).show();
}
else {
Toast.makeText(BookDetailActivity.this, "评论失败", 2000).show();
}
}
else if(queryId.equals(BookService.CLICKWISH)){
if(result.equals("success")){
}
else {
}
}
}
}
};
service = new BookService();
Intent intent = getIntent();
bookname = intent.getStringExtra("bookname");
service.getBooksByName(bookname, onQueryCompleteListener);
}
private void getInfomation() {
// TODO Auto-generated method stub
if(list.size() == 1){
nextLayout.setVisibility(View.GONE);
}
maxPage = list.size()-1;
if(list.get(curPage).get("status").toString().equals("1.0")){
bottomLayout.setVisibility(View.GONE);
}
title.setText((list.get(curPage).get("bookname")).toString());
		publishDays.setText(getDays() + " days ago");
price.setText(list.get(curPage).get("price").toString());
userTel.setText(list.get(curPage).get("mobile").toString());
String readtime = list.get(curPage).get("clicks").toString().substring(0, list.get(curPage).get("clicks").toString().length()-2);
		readTime.setText(readtime + " views");
map = list.get(curPage);
bookid = list.get(curPage).get("book_id").toString();
service.clickListener(bookid, onQueryCompleteListener);
resetService();
if(list.get(curPage).get("qq").toString().equals("")){
llUserQQ.setVisibility(View.GONE);
}else{
llUserQQ.setVisibility(View.VISIBLE);
userQQ.setText(list.get(curPage).get("qq").toString());
}
if(list.get(curPage).get("weixin").toString().equals("") ||list.get(curPage).get("weixin").toString().equals(null) ){
llUserWeChat.setVisibility(View.GONE);
}else{
llUserWeChat.setVisibility(View.VISIBLE);
userWinxin.setText(list.get(curPage).get("weixin").toString());
}
userName.setText(list.get(curPage).get("username").toString());
System.out.println(list.get(curPage).get("gender").toString()+" "+list.get(curPage).get("user_id").toString());
if(list.get(curPage).get("gender").toString().equals("0.0")){
usersex.setImageResource(R.drawable.user_sex_woman);
}
else if(list.get(curPage).get("gender").toString().equals("2.0")){
usersex.setImageResource(R.drawable.user_sex_secret);
}
else if(list.get(curPage).get("gender").toString().equals("1.0")){
usersex.setImageResource(R.drawable.user_sex_man);
}
basicCondition.setText(list.get(curPage).get("newness").toString());
suitCrowd.setText(list.get(curPage).get("audience").toString());
myEvaluation.setText(list.get(curPage).get("description").toString());
imageLoader.addTask(Utils.URL+list.get(curPage).get("user_id").toString(), userImage);
int length = list.get(curPage).get("imgs").toString().length();
if(length<10){
image_number = 0;
bookDetailImage1.setVisibility(View.GONE);
bookDetailImage2.setVisibility(View.GONE);
bookDetailImage3.setVisibility(View.VISIBLE);
}
else if(length<40){
image_number = 1;
bookDetailImage1.setVisibility(View.GONE);
bookDetailImage2.setVisibility(View.GONE);
bookDetailImage3.setVisibility(View.VISIBLE);
imageLoader.addTask(Utils.IMGURL+list.get(curPage).get("imgs").toString().substring(1,37), bookDetailImage3);
}
else if(length<80){
image_number = 2;
bookDetailImage1.setVisibility(View.GONE);
bookDetailImage2.setVisibility(View.VISIBLE);
bookDetailImage3.setVisibility(View.VISIBLE);
imageLoader.addTask(Utils.IMGURL+list.get(curPage).get("imgs").toString().substring(1,37), bookDetailImage3);
imageLoader.addTask(Utils.IMGURL+list.get(curPage).get("imgs").toString().substring(39,75), bookDetailImage2);
}
else{
image_number = 3;
bookDetailImage1.setVisibility(View.VISIBLE);
bookDetailImage2.setVisibility(View.VISIBLE);
bookDetailImage3.setVisibility(View.VISIBLE);
imageLoader.addTask(Utils.IMGURL+list.get(curPage).get("imgs").toString().substring(1,37), bookDetailImage1);
imageLoader.addTask(Utils.IMGURL+list.get(curPage).get("imgs").toString().substring(39,75), bookDetailImage2);
imageLoader.addTask(Utils.IMGURL+list.get(curPage).get("imgs").toString().substring(77,113), bookDetailImage3);
}
}
public void resetService(){
new WishService().getWishComment(bookid, onQueryCompleteListener);
}
private int getDays() {
// TODO Auto-generated method stub
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm");
Date adddate = null;
try {
adddate = sdf.parse(list.get(curPage).get("added_time").toString());
} catch (ParseException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
		Date curDate = new Date(System.currentTimeMillis()); // get the current date
Calendar cal0 = Calendar.getInstance();
cal0.setTime(adddate);
Calendar cal1 = Calendar.getInstance();
cal1.setTime(curDate);
long time0 = cal0.getTimeInMillis();
long time1 = cal1.getTimeInMillis();
int days = (int) ((time1-time0)/(1000*3600*24));
return days;
}
private void initView() {
// TODO Auto-generated method stub
title = (TextView)findViewById(R.id.book_detail_title);
readTime = (TextView)findViewById(R.id.book_detail_read_time);
publishDays = (TextView)findViewById(R.id.book_detail_publish_days);
price = (TextView)findViewById(R.id.book_detail_price);
userImage = (CircleImageView)findViewById(R.id.book_detail_user_image);
userName = (TextView)findViewById(R.id.book_detail_user_name);
userTel = (TextView)findViewById(R.id.book_detail_user_tel);
userQQ = (TextView)findViewById(R.id.book_detail_user_QQ);
userWinxin = (TextView)findViewById(R.id.book_detail_user_weixin);
basicCondition = (TextView)findViewById(R.id.book_detail_basic_condition);
suitCrowd = (TextView)findViewById(R.id.book_detail_suit_crowd);
myEvaluation = (TextView)findViewById(R.id.book_detail_my_evaluation);
showAll = (TextView)findViewById(R.id.book_detail_show_all);
emptytTextView = (TextView)findViewById(R.id.empty_txv);
usersex = (ImageView)findViewById(R.id.user_sex);
llUserQQ = (LinearLayout)findViewById(R.id.ll_book_user_qq);
llUserWeChat = (LinearLayout)findViewById(R.id.ll_book_user_wechat);
bottomLayout = (LinearLayout)findViewById(R.id.bookdetail_bottomlayout);
header = (RelativeLayout)findViewById(R.id.header);
nextLayout = (RelativeLayout)findViewById(R.id.next_front_layout);
back = (ImageButton)findViewById(R.id.bt_detail_back);
bookDetailImage1 = (ImageView)findViewById(R.id.book_detail_image_1);
bookDetailImage2 = (ImageView)findViewById(R.id.book_detail_image_2);
bookDetailImage3 = (ImageView)findViewById(R.id.book_detail_image_3);
nextSeller = (Button)findViewById(R.id.next_seller);
frontSeller = (Button)findViewById(R.id.front_seller);
comment = (NoScrollListView)findViewById(R.id.book_detail_comment);
bottomComment = (ImageView)findViewById(R.id.book_detail_bottom_comment);
bottomConnect = (ImageView)findViewById(R.id.book_detail_bottom_connect);
bottomShare = (ImageView)findViewById(R.id.book_detail_bottom_share);
back.setOnClickListener(this);
showAll.setClickable(true);
showAll.setOnClickListener(this);
bottomComment.setClickable(true);
bottomConnect.setClickable(true);
bottomShare.setClickable(true);
header.setLongClickable(true);
header.setOnTouchListener(new MyGestureListener(this));
nextSeller.setOnClickListener(this);
frontSeller.setOnClickListener(this);
bottomComment.setOnClickListener(this);
bottomConnect.setOnClickListener(this);
bottomShare.setOnClickListener(this);
}
public class MyGestureListener extends GestureListener {
public MyGestureListener(Context context) {
super(context);
// TODO Auto-generated constructor stub
}
@Override
public boolean onTouch(View v, MotionEvent event) {
// TODO Auto-generated method stub
return super.onTouch(v, event);
}
@Override
public boolean left() {
if(image_number == 2){
Bitmap bitmap = ((BitmapDrawable) bookDetailImage2.getDrawable())
.getBitmap();
bookDetailImage2.setImageBitmap(((BitmapDrawable) bookDetailImage3
.getDrawable()).getBitmap());
bookDetailImage3.setImageBitmap(bitmap);
}else if(image_number == 3){
Bitmap bitmap = ((BitmapDrawable) bookDetailImage1.getDrawable())
.getBitmap();
bookDetailImage1.setImageBitmap(((BitmapDrawable) bookDetailImage3
.getDrawable()).getBitmap());
bookDetailImage3.setImageBitmap(((BitmapDrawable) bookDetailImage2
.getDrawable()).getBitmap());
bookDetailImage2.setImageBitmap(bitmap);
}
return super.left();
}
@Override
public boolean right() {
if(image_number == 2){
Bitmap bitmap = ((BitmapDrawable) bookDetailImage2.getDrawable())
.getBitmap();
bookDetailImage2.setImageBitmap(((BitmapDrawable) bookDetailImage3
.getDrawable()).getBitmap());
bookDetailImage3.setImageBitmap(bitmap);
}else if(image_number == 3){
Bitmap bitmap = ((BitmapDrawable) bookDetailImage1.getDrawable())
.getBitmap();
bookDetailImage1.setImageBitmap(((BitmapDrawable) bookDetailImage2
.getDrawable()).getBitmap());
bookDetailImage2.setImageBitmap(((BitmapDrawable) bookDetailImage3
.getDrawable()).getBitmap());
bookDetailImage3.setImageBitmap(bitmap);
}
return super.right();
}
}
@Override
public void onClick(View arg0) {
// TODO Auto-generated method stub
switch (arg0.getId()) {
case R.id.bt_detail_back:
this.finish();
break;
case R.id.book_detail_show_all:
if(showAllFlag){
showAll.setText("向上收起");
basicCondition.setSingleLine(false);
suitCrowd.setSingleLine(false);
myEvaluation.setSingleLine(false);
showAllFlag=false;
}else{
basicCondition.setSingleLine(true);
suitCrowd.setSingleLine(true);
myEvaluation.setSingleLine(true);
showAllFlag=true;
showAll.setText("展开全部");
}
break;
case R.id.front_seller:
if(curPage>0){
curPage--;
getInfomation();
}else{
Toast.makeText(BookDetailActivity.this, "没有前一家", 2000).show();
}
break;
case R.id.next_seller:
if(curPage != maxPage){
curPage++;
getInfomation();
}else{
Toast.makeText(BookDetailActivity.this, "没有后一家", 2000).show();
}
break;
case R.id.book_detail_bottom_comment:
if(Utils.user_id.equals("")){
Intent intent3 = new Intent(BookDetailActivity.this,LoginActivity.class);
startActivity(intent3);
}
else {
InputPopwindow inputPopwindow = new InputPopwindow(this,list.get(curPage).get("book_id").toString(),0);
inputPopwindow.showAtLocation(this.findViewById(R.id.main_bottom), Gravity.BOTTOM|Gravity.CENTER_HORIZONTAL, 0, 0);
}
break;
case R.id.book_detail_bottom_connect:
if(Utils.user_id.equals("")){
Intent intent3 = new Intent(BookDetailActivity.this,LoginActivity.class);
startActivity(intent3);
}
else {
ConnectSellerPopwindow connectSellerPopwindow = new ConnectSellerPopwindow(BookDetailActivity.this, map);
connectSellerPopwindow.showAtLocation(this.findViewById(R.id.main_bottom), Gravity.BOTTOM|Gravity.CENTER_HORIZONTAL, 0, 0);
}
break;
case R.id.book_detail_bottom_share:
if(Utils.user_id.equals("")){
Intent intent3 = new Intent(BookDetailActivity.this,LoginActivity.class);
startActivity(intent3);
}
else {
ScreenShot.shoot(this);
shareMsg("/sdcard/share.png");
}
break;
default:
break;
}
}
public void shareMsg(String imgPath) {
Intent intent = new Intent(Intent.ACTION_SEND);
if (imgPath == null || imgPath.equals("")) {
intent.setType("text/plain"); // 纯文本
} else {
File f = new File(imgPath);
if (f != null && f.exists() && f.isFile()) {
intent.setType("image/png");
Uri u = Uri.fromFile(f);
intent.putExtra(Intent.EXTRA_STREAM, u);
}
}
intent.putExtra(Intent.EXTRA_SUBJECT, "分享");
intent.putExtra(Intent.EXTRA_TEXT, "我在校园淘书上看到了这本书蛮有意思, 最便捷的二手书交易App, 人生之路, 淘书起步! http://172.16.17.32:5000/download/OldBookMarket.apk");
intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
startActivity(Intent.createChooser(intent, "请选择"));
}
@Override
protected void onPause() {
// TODO Auto-generated method stub
super.onPause();
MobclickAgent.onPause(this);
}
@Override
protected void onResume() {
// TODO Auto-generated method stub
super.onResume();
MobclickAgent.onResume(this);
}
}
| b71c3e3737ed21f5f6edbbc2b0ea82fe7808e348 | [
"Markdown",
"Java",
"INI"
] | 10 | Java | Trisaa/OldBookMarket | ce37044380b18feebba9a405ad0bb010154b85a3 | 28749bde7c53c2e18352bfbad077891a60df0c24 | |
refs/heads/main | <repo_name>pinjanos/str-frontend-kepesito-vizsga<file_sep>/JS-task/task02-dom/dom.js
function changeOuterLinks() {
const navLinks = document.querySelectorAll('nav#link-list a');
navLinks.forEach( element => {
if (element.innerHTML.includes('outer-link')) {
element.setAttribute('target', '_blank');
element.innerHTML = `<strong>${element.innerHTML}</strong>`;
}
})
document.querySelectorAll("nav").forEach((element) => {
element.style.display = "flex";
element.style.flexDirection = "column";
element.style.margin = "0px auto";
element.style.width = "30%";
element.style.border = "1px solid blue";
element.style.padding = "16px";
});
}
export { changeOuterLinks }; | 2ed54bcc71809517efbe9300b44ef2c0a8914608 | [
"JavaScript"
] | 1 | JavaScript | pinjanos/str-frontend-kepesito-vizsga | 6db9169edff5996938b7fd4c6d825c42dcf79fed | 93d40803ac7edfd3316dca694f76ab01becf1e7d | |
refs/heads/master | <repo_name>Antoineboulou/ECS<file_sep>/Assets/Testing.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using Unity.Entities;
using Unity.Transforms;
using Unity.Collections;
using Unity.Rendering;
public class Testing : MonoBehaviour
{
[SerializeField] private Mesh mesh;
[SerializeField] private Material material;
private void Start()
{
EntityManager entityManager = World.DefaultGameObjectInjectionWorld.EntityManager;
EntityArchetype entityArchetype = entityManager.CreateArchetype(
typeof (LevelUpComponent),
typeof (Translation),
typeof (RenderMesh),
typeof (LocalToWorld),
typeof (RenderBounds),
typeof (MoveSpeedComponent)
);
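        // Batch-create 500 entities that share this archetype, then fill in per-entity component
        // values below; the temporary NativeArray is disposed once the entities have been set up.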
NativeArray<Entity> entityArray = new NativeArray<Entity>(500, Allocator.Temp);
entityManager.CreateEntity(entityArchetype, entityArray);
for (int i = 0; i < entityArray.Length; i++)
{
Entity entity = entityArray[i];
entityManager.SetComponentData(entity, new LevelUpComponent { level = Random.Range(10, 20)});
entityManager.SetComponentData(entity, new MoveSpeedComponent { speed = Random.Range(1f, 2f)});
entityManager.SetComponentData(entity, new Translation {
Value = new Unity.Mathematics.float3(Random.Range(-8,8f),Random.Range(-5,5f),0)});
entityManager.SetSharedComponentData(entity, new RenderMesh {
mesh = mesh,
material = material
});
}
entityArray.Dispose();
}
}
<file_sep>/Assets/LevelUpSystem.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using Unity.Entities;
public class LevelUpSystem : ComponentSystem
{
protected override void OnUpdate() {
Entities.ForEach((ref LevelUpComponent levelUpComponent) =>
{
levelUpComponent.level += 1f * Time.DeltaTime;
Debug.Log(levelUpComponent.level);
});
}
}
| a587422a95d911187d7404eca83dc5b453b1159c | [
"C#"
] | 2 | C# | Antoineboulou/ECS | 9bb08ced1ab3cb77b54d701db1785f6e26ec059c | ba6c8ed89de2e9d2c36fa702144e68e26592837c | |
refs/heads/master | <repo_name>dpkg9510/UnloadDaemons<file_sep>/unloadapd/DEBIAN/prerm
#!/bin/bash
echo Removing the script.
rm -r /etc/rc.d/unloadApd
echo REMOVED SUCCESSFULLY
exit 0<file_sep>/unloadvsd/DEBIAN/postinst
#!/bin/bash
echo Installing the script.
mkdir -p /etc/rc.d && echo "launchctl unload /System/Library/LaunchDaemons/com.apple.videosubscriptionsd.plist" > /etc/rc.d/unloadVSD && chmod 777 /etc/rc.d/unloadVSD
echo INSTALLED SUCCESSFULLY.
echo Unloading the daemon.
cd /etc/rc.d
bash unloadVSD
echo DONE!
echo /u/dpkg_ says hi
exit 0<file_sep>/unloadtipsd/DEBIAN/prerm
#!/bin/bash
echo Removing the script.
rm -r /etc/rc.d/unloadTipsd
echo REMOVED SUCCESSFULLY.
echo /u/dpkg_ says bye
exit 0<file_sep>/unloadschpa/DEBIAN/postinst
#!/bin/bash
echo Installing the script.
mkdir -p /etc/rc.d && echo "launchctl unload /System/Library/LaunchDaemons/com.apple.SafariCloudHistoryPushAgent.plist" > /etc/rc.d/unloadSchpa && chmod 777 /etc/rc.d/unloadSchpa
echo INSTALLED SUCCESSFULLY.
echo Unloading the daemon.
cd /etc/rc.d
bash unloadSchpa
echo DONE!
echo /u/dpkg_ says hi
exit 0<file_sep>/unloaduad/DEBIAN/postinst
#!/bin/bash
echo Installing the script.
mkdir -p /etc/rc.d && echo "launchctl unload /System/Library/LaunchDaemons/com.apple.coreservices.lsactivity.plist" > /etc/rc.d/unloadUad && chmod 777 /etc/rc.d/unloadUad
echo INSTALLED SUCCESSFULLY.
echo Unloading the daemon.
cd /etc/rc.d
bash unloadUad
echo DONE!
echo /u/dpkg_ says hi
exit 0<file_sep>/unloadapd/DEBIAN/postinst
#!/bin/bash
echo Installing the script.
mkdir -p /etc/rc.d && echo "launchctl unload /System/Library/LaunchDaemons/com.apple.askpermissiond.plist" > /etc/rc.d/unloadApd && chmod 777 /etc/rc.d/unloadApd
echo INSTALLED SUCCESSFULLY.
echo Unloading the daemon.
cd /etc/rc.d
bash unloadApd
echo DONE!
echo /u/dpkg_ says hi
exit 0<file_sep>/unloadwbd/DEBIAN/postinst
#!/bin/bash
echo Installing the script.
mkdir -p /etc/rc.d && echo "launchctl unload /System/Library/LaunchDaemons/com.apple.WebBookmarks.webbookmarksd.plist" > /etc/rc.d/unloadWbd && chmod 777 /etc/rc.d/unloadWbd
echo INSTALLED SUCCESSFULLY.
echo Unloading the daemon.
cd /etc/rc.d
bash unloadWbd
echo DONE!
echo /u/dpkg_ says hi
exit 0 | 6d701c99cab3b303bac8618a4aff84d67a4fd325 | [
"Shell"
] | 7 | Shell | dpkg9510/UnloadDaemons | a37c7551d78a35dda0223476719cd360584f25c5 | 8ee7100ec4653fc6b5a0aa6ab585d398fabd5659 | |
refs/heads/master | <file_sep>def bloques(A):
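    # Scans A and returns a list of (start, end) index pairs, one per run of consecutive 1s.
    # Note: a run that reaches the end of A without a trailing 0 is never closed or appended.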
A_bloques = []
inicio = -1
def_inicio=False
for i in range(0, len(A)):
if A[i] == 1 and def_inicio==False:
inicio = i
def_inicio=True
if A[i] == 0 and inicio != -1:
fin = i - 1
A_bloques.append((inicio, fin))
inicio = -1
def_inicio=False
return A_bloques
def peso(A,B):
if len(A)==1:
return A/sum(B)
else:
return sum(A)/B
def iterar(A,B):
    for i in A:
        # TODO: the pairing logic was left unimplemented in the original; 'pass' keeps the module runnable
        pass
def voraz():
A=[0, 1, 1, 1, 0, 0, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 0]
B=[0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0]
bloquesA=bloques(A)
bloquesB=bloques(B)
if len(bloquesA)> len(bloques(B)):
iterar(bloquesA,bloquesB)
else:
iterar(bloquesB,bloquesA)
print(bloquesA)
print(bloquesB)
# print(sum([1,2,3]))
if __name__ == "__main__":
voraz()
| 822d1e77bb18fdc9a0e0601ac6b1e0252e65795b | [
"Python"
] | 1 | Python | AloWarrior2000/ADA-Proyecto1 | a65477a6298dd4e4d6d9d2b4687d980257a1c9c2 | 505b24bbc63450646db76cf5c22ccdd86c649bc9 | |
refs/heads/main | <repo_name>nehal19/SeleniumPractice<file_sep>/src/main/java/com/selenium/Scenario6.java
package com.selenium;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
public class Scenario6 {
@Test(dataProvider = "credentials")
public void checkCredentials(String username, String password) {
System.out.println(username);
System.out.println(password);
}
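	// TestNG invokes checkCredentials once per row supplied by the data provider below,
	// so this test runs twice with the two username/password pairs.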
@DataProvider(name="credentials")
	public Object[][] getData() {
Object [][] data = new Object[2][2];
data[0][0] = "username1";
data[0][1] = "<PASSWORD>";
data[1][0] = "username2";
data[1][1] = "<PASSWORD>";
return data;
}
}
<file_sep>/src/main/java/com/selenium/Scenario3.java
package com.selenium;
import java.util.Iterator;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.openqa.selenium.By;
import org.openqa.selenium.Keys;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.chrome.ChromeDriver;
import org.openqa.selenium.support.ui.Select;
public class Scenario3 {
WebDriver driver;
@Before
public void setup() {
System.setProperty("webdriver.chrome.driver", "C:\\Users\\nehal.kadapatti\\"
+ "Desktop\\chromedriver.exe");
driver = new ChromeDriver();
driver.manage().timeouts().implicitlyWait(5, TimeUnit.SECONDS);
}
@Test
public void medicalRecords() throws InterruptedException {
driver.get("http://openclinic.sourceforge.net/openclinic/home/index.php");
driver.findElement(By.linkText("Medical Records")).sendKeys(Keys.CONTROL,Keys.ENTER);
Set<String> windows = driver.getWindowHandles();
Iterator<String> it = windows.iterator();
String parent = it.next();
String child = it.next();
driver.switchTo().window(child);
driver.findElement(By.linkText("Search Patient")).click();
WebElement options = driver.findElement(By.id("search_type"));
Select selectFrom = new Select(options);
Thread.sleep(2000);
selectFrom.selectByVisibleText("First Name");
driver.findElement(By.id("search_patient")).click();
Thread.sleep(5000);
}
@Test
public void popup() throws InterruptedException {
driver.get("http://popuptest.com/goodpopups.html");
driver.findElement(By.linkText("Good PopUp #3")).click();
Thread.sleep(2000);
Set<String> handler = driver.getWindowHandles();
Iterator<String> it = handler.iterator();
String parentWindowId = it.next();
System.out.println("Parent Window:" + parentWindowId);
String childWindowId = it.next();
System.out.println("Child Window" + childWindowId);
driver.switchTo().window(childWindowId);
Thread.sleep(2000);
System.out.println("ChildWindowpopupTitle" + driver.getTitle());
driver.close();
driver.switchTo().window(parentWindowId);
Thread.sleep(2000);
System.out.println("Parent Window Title:" + driver.getTitle());
}
@After
public void teardown() {
driver.quit();
}
}
<file_sep>/src/main/java/com/selenium/Scenario8.java
package com.selenium;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.chrome.ChromeDriver;
public class Scenario8 {
WebDriver driver;
@Before
public void setup() {
System.setProperty("webdriver.chrome.driver", "C:\\Users\\nehal.kadapatti\\"
+ "Desktop\\chromedriver.exe");
driver = new ChromeDriver();
driver.manage().timeouts().implicitlyWait(5, TimeUnit.SECONDS);
}
@After
public void teardown() {
driver.quit();
}
@Test
public void brokenLinks() throws IOException {
driver.get("http://www.zlti.com");
List<WebElement> links=driver.findElements(By.tagName("a"));
System.out.println("Total links are "+links.size());
System.setProperty("javax.net.ssl.trustStore", "D:\\certs");
System.setProperty("javax.net.ssl.trustStorePassword", "<PASSWORD>");
for(int i=0;i<links.size();i++)
{
WebElement ele= links.get(i);
String url=ele.getAttribute("href");
if(url!= null && (!url.contains("javascript"))) {
verifyLinkActive(url);
}
}
}
public void verifyLinkActive(String linkUrl) throws IOException {
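		// Opens an HTTP connection to the link and reports 200 responses and 404s;
		// other status codes are ignored, and connection errors are caught and printed below.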
try
{
URL url = new URL(linkUrl);
HttpURLConnection httpURLConnect=(HttpURLConnection)url.openConnection();
httpURLConnect.setConnectTimeout(3000);
httpURLConnect.connect();
if(httpURLConnect.getResponseCode()==200)
{
System.out.println(linkUrl+" - "+httpURLConnect.getResponseMessage());
}
if(httpURLConnect.getResponseCode()==HttpURLConnection.HTTP_NOT_FOUND)
{
System.out.println(linkUrl+" - "+httpURLConnect.getResponseMessage() +
" - "+ HttpURLConnection.HTTP_NOT_FOUND);
}
} catch (Exception e) {
System.out.println(e.getMessage());
}
}
}
<file_sep>/src/main/resources/config.properties
browser = chrome
url = https://www.google.com
username = nehal
password = <PASSWORD> | 97a898b07fa26464fde598a216a08e8e1f11c35b | [
"Java",
"INI"
] | 4 | Java | nehal19/SeleniumPractice | dcb66892377d122ac47d7979f915904bc193dd60 | 6d7e9e7f471df8c4c23eaba8c8e414b8badf8ab7 | |
refs/heads/master | <repo_name>kadireker/customerList<file_sep>/src/main/webapp/resources/js/scripts.js
$(document)
.ready(
function() {
$("#btnAddCustomer").click(function() {
var Person = {
name : $('#first_name').val(),
surname : $('#last_name').val(),
phone : $('#phone').val(),
captcha : $('#captcha').val()
};
$.ajax({
url : "/myapp/customer/save",
context : document.body,
type : 'POST',
data : Person,
success : function() {
window.location = "?state=1";
},
error : function() {
alert("Captcha does not match!!!");
}
});
});
$('#customerDelete').on('show.bs.modal', function(e) {
$id = $(e.relatedTarget).attr('data-refid');
$("#btnDeleteCustomer").click(function() {
$.ajax({
url : '/myapp/customer/delete?id=' + $id,
type : 'POST',
success : function(result) {
window.location = "?state=2";
},
error : function() {
alert("failure");
}
});
});
});
$('#customerEdit').on(
'show.bs.modal',
function(e) {
$id = $(e.relatedTarget).attr('data-refid');
$.get('/myapp/customer/check/' + $id, function(
customer) {
$('#first_name_edit').val(customer.name);
$('#last_name_edit').val(customer.surname);
$('#phone_edit').val(customer.phone);
});
$("#btnEditCustomer").click(function() {
var Person = {
name : $('#first_name_edit').val(),
surname : $('#last_name_edit').val(),
phone : $('#phone_edit').val()
};
$.ajax({
url : "/myapp/customer/update/" + $id,
context : document.body,
type : 'POST',
data : Person,
success : function(result) {
window.location = "?state=3";
},
error : function() {
alert("failure");
}
});
});
});
$("#btnRefresh").click(function() {
window.location = "/myapp/customer";
});
$.validate();
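					// Input mask for the phone fields: strips non-digit characters and re-renders the value
					// as "(XXX) XXX XX XX", adding a " - " separator once an eleventh digit is typed.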
$("#phone, #phone_edit")
.keyup(
function() {
var val = $(this).val();
var bas = val.length > 15 ? val.substr(
0, 15) : val;
var son = val.length > 15 ? val.substr(
15, val.length) : "";
val = val.length > 15 ? String(bas
.replace(/[\D]/g, ''))
+ String(son.replace(
/[^0-9\s\-]/g, ''))
: String(val.replace(/[\D]/g,
''));
var str = "";
if (val.length == 11) {
str = "("
+ val.substr(0, 3)
+ ") "
+ val.substr(3, 3)
+ " "
+ val.substr(6, 2)
+ " "
+ val.substr(8, 2)
+ " - "
+ val
.substr(10,
val.length);
} else if (val.length >= 8) {
str = "(" + val.substr(0, 3) + ") "
+ val.substr(3, 3) + " "
+ val.substr(6, 2) + " "
+ val.substr(8, val.length);
} else if (val.length >= 6) {
str = "(" + val.substr(0, 3) + ") "
+ val.substr(3, 3) + " "
+ val.substr(6, val.length);
} else if (val.length >= 3) {
str = "(" + val.substr(0, 3) + ") "
+ val.substr(3, val.length);
} else {
str = val;
}
$(this).val(str);
});
});
<file_sep>/src/main/java/com/enerjiyazilim/myapp/HomeController.java
package com.enerjiyazilim.myapp;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.stereotype.Controller;
import org.springframework.ui.ModelMap;
import org.springframework.util.StringUtils;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.servlet.View;
import org.springframework.web.servlet.view.RedirectView;
import javax.servlet.http.HttpSession;
import com.enerjiyazilim.model.Customer;
import com.enerjiyazilim.service.CustomerDAO;
/**
*
* Handles requests for the application home page.
* Spring Mvc Controller Class
* created by <NAME> 23/08/2015
*
*/
@Controller
public class HomeController {
@RequestMapping(value = "/", method = RequestMethod.GET)
public String home() {
return "home";
}
@RequestMapping(value = "/about", method = RequestMethod.GET)
public String about() {
return "about";
}
/**
* List of Customer with GET method
*
* @return list of customers
*/
@Autowired
private CustomerDAO customerDao;
@Autowired
private MongoTemplate mongoTemplate;
public static final String COLLECTION = "customer";
@RequestMapping(value = "/customer", method = RequestMethod.GET)
public String getCustomerList(ModelMap model) {
model.addAttribute("customerList", customerDao.listCustomer());
return "person";
}
/**
* Creating the customer in the DB through the service layer which is 'POST'
* method
*
* @model customer
* @return
*/
@RequestMapping(value = "/customer/save", method = RequestMethod.POST)
public View createPerson(@ModelAttribute Customer customer, ModelMap model,
HttpSession session) {
String captcha = (String) session.getAttribute("CAPTCHA");
if (captcha == null
|| (captcha != null && !captcha.equals(customer.getCaptcha()))) {
customer.setCaptcha("");
model.addAttribute("state", "Captcha does not match");
return new RedirectView("redirect:customer");
}
if (StringUtils.hasText(customer.getId())) {
customerDao.updateCustomer(customer);
} else {
customerDao.addCustomer(customer);
}
return new RedirectView("/myapp/customer");
}
/**
* Delete customer from the DB through a service layer which is 'POST'
* method
*
* @model customer
*/
@RequestMapping(value = "/customer/delete", method = RequestMethod.POST)
public View deleteCustomer(@ModelAttribute Customer customer, ModelMap model) {
customerDao.deleteCustomer(customer);
return new RedirectView("/myapp/customer");
}
/**
	 * Fetches the customer with the given 'id' (used when updating) from the DB
	 * through the service layer; 'GET' method
*
* @param id
* @return customer
*/
@RequestMapping(value = "/customer/check/{id}", method = RequestMethod.GET)
@ResponseBody
public Customer getById(@PathVariable String id) {
return customerDao.getCustomer(id);
}
@RequestMapping(value = "/customer/update/{id}", method = RequestMethod.POST)
public View updateById(@ModelAttribute Customer customer,
@PathVariable String id) {
Customer existingUser = customerDao.getCustomer(id);
if (existingUser != null) {
existingUser.setName(customer.getName());
existingUser.setSurname(customer.getSurname());
existingUser.setPhone(customer.getPhone());
mongoTemplate.save(existingUser, COLLECTION);
}
return new RedirectView("/myapp/customer");
}
}
<file_sep>/README.md
# This project is about managing a customer list. You can add, edit, or delete customers. It is built with Spring MVC and MongoDB, and uses AJAX and Bootstrap on the front end.
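
A quick sketch of the endpoints exposed by `HomeController` (paths as written in the code; the app is assumed to run under the `/myapp` context):

* `GET /customer` - list all customers
* `POST /customer/save` - create a customer (checked against the `CAPTCHA` session value)
* `POST /customer/delete` - delete a customer
* `GET /customer/check/{id}` - fetch a single customer as JSON
* `POST /customer/update/{id}` - update a customer's name, surname and phone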
<file_sep>/src/main/java/com/enerjiyazilim/service/CustomerDAOImpl.java
package com.enerjiyazilim.service;
import java.util.List;
import java.util.UUID;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.stereotype.Repository;
import com.enerjiyazilim.model.Customer;
/**
 * Service Layer implementation
*
* Created by kadireker on 24/08/2015.
*/
@Repository
public class CustomerDAOImpl implements CustomerDAO {
@Autowired
private MongoTemplate mongoTemplate;
public static final String COLLECTION = "customer";
public void addCustomer(Customer customer) {
if (!mongoTemplate.collectionExists(Customer.class)) {
mongoTemplate.createCollection(Customer.class);
}
customer.setId(UUID.randomUUID().toString());
mongoTemplate.insert(customer, COLLECTION);
}
public List<Customer> listCustomer() {
return mongoTemplate.findAll(Customer.class, COLLECTION);
}
public void deleteCustomer(Customer customer) {
mongoTemplate.remove(customer, COLLECTION);
}
public void updateCustomer(Customer customer) {
		mongoTemplate.save(customer, COLLECTION);
}
public Customer getCustomer(String id) {
return mongoTemplate.findById(id, Customer.class);
}
} | 241747bc15ddb261f770153dbd99fa2e2306c06d | [
"JavaScript",
"Java",
"Markdown"
] | 4 | JavaScript | kadireker/customerList | d8665d3aab0b2269de899989352dbb6114b317d2 | c5246c185a50466e0ca8c9131e8515eb7e81f296 | |
refs/heads/master | <repo_name>abdo2017/php-quiz-app<file_sep>/inc/toasts.php
<?php
$correctToasts=[
"That was correct, great job!",
"Awesome work, that was right!",
"Nice, you got that correct!"
];
$wrongToasts=[
"Oh no! That was incorrect",
"Rats! That wasn't quite right",
"Oops! Wrong answer"
];
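// Note: the helpers below pick a message with rand(0,2), so each array is expected to hold exactly three entries.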
function getCorrectToast(){
global $correctToasts;
return $correctToasts[rand(0,2)];
}
function getWrongToast(){
global $wrongToasts;
return $wrongToasts[rand(0,2)];
}
?>
<file_sep>/inc/generate_questions.php
<?php
//initialize session if it hasn't already been
if (session_status() == PHP_SESSION_NONE) {
session_start();
}
function generateNewQuestionBank(){
unset($_SESSION['question_bank']);
$_SESSION["question_bank"] = [];
$_SESSION["score"] = 0;
// Loop for required number of questions
for ($i=1; $i<=10; $i++){
generateRandomQuestion($i);
}
}
// Generate random questions
//q stands for question
//a stands for answer, where a1 is the correct one
function generateRandomQuestion($i){
// Get random numbers to add
$question1 = rand(10,90); // it's the number 1 actually
$question2 = rand(10,90); // it's the number 2 actually
// CALCULATE correct answer
$answer1 = $question1 + $question2;
$answer2 = generateWrongAnswer($answer1);
// Make sure the two are unique answers
do{
$answer3 = generateWrongAnswer($answer1);
} while ($answer2 == $answer3);
// Add question and answer to questions array
$_SESSION["question_bank"][$i] = [
"question" => "$question1 + $question2",
"answers" => [$answer1,$answer2,$answer3]
];
}
// Get incorrect answers within 10 numbers either way of correct answer
function generateWrongAnswer($answer1){
do{
$deviation = rand(-10,10);
} while ($deviation == 0);
return $answer1 + $deviation;
}
<file_sep>/play.php
<?php
//initialize session if it hasn't already been
if (session_status() == PHP_SESSION_NONE) {
session_start();
}
//include quiz code
require_once "inc/quiz.php";
?>
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Math Quiz: Addition</title>
<link rel="stylesheet" href="css/normalize.css">
<link rel="stylesheet" href="css/styles.css">
</head>
<body>
<div class="container">
<?php if ($questionNumber==11){?>
<div class="congrats-box box">
<h2>You got <?php echo $_SESSION["score"]; ?> out of 10!</h2>
<h1><?php echo getFinalMessage($_SESSION["score"]); ?></h1>
<form action="play.php" method="post">
<input type="submit" class="btn" name="answer" value="Play Again!" />
</form>
</div>
<?php } else {?>
<div class="quiz-box box">
<div class="toast <?php echo $toastColor ?>"><?php echo $toastMessage; ?></div>
<p class="breadcrumbs">Question <?php echo $questionNumber ?> of 10</p>
<p class="quiz">What is <?php echo $questionQuestion; ?>?</p>
<form action="play.php" method="post">
<input type="hidden" name="id" value="0" />
<?php
//display answers
foreach($questionAnswers as $answer){ ?>
<input type="submit" class="btn" name="answer" value="<?php echo $answer ?>" />
<?php } ?>
</form>
</div>
<?php } ?>
</div>
</body>
</html>
<file_sep>/inc/quiz.php
<?php
/*
* PHP Techdegree Project 2: Build a Quiz App in PHP
*
* These comments are to help you get started.
* You may split the file and move the comments around as needed.
*
* You will find examples of formating in the index.php script.
* Make sure you update the index file to use this PHP script, and persist the users answers.
*
* For the questions, you may use:
* 1. PHP array of questions
* 2. json formated questions
* 3. auto generate questions
*
*/
//initialize session if it hasn't already been
if (session_status() == PHP_SESSION_NONE) {
session_start();
}
require "generate_questions.php";
// Move question tracker up one
$_SESSION['question_number']++;
//if we haven't started a test yet, or the previous test is finished
if ($_SESSION["question_number"]>=12 || isset($_POST['play'])){
//start a new game
generateNewQuestionBank();
$_SESSION["question_number"] = 1;
$toastMessage = "Good luck!";
}
// Keeps track of which questions have been asked
$questionNumber = $_SESSION['question_number'];
if ($questionNumber!=11){
$questionQuestion = $_SESSION['question_bank'][$questionNumber]["question"];
$questionAnswers = $_SESSION['question_bank'][$questionNumber]["answers"];
// Shuffle answer buttons
shuffle($questionAnswers);
}
//do for all questions but Q1
if ($questionNumber!=1){
$previousAnswer = $_POST['answer'];
// Toast correct and incorrect answers
require "toasts.php";
if ($previousAnswer == $_SESSION['question_bank'][$questionNumber-1]["answers"][0]){
$toastColor = "green";
$toastMessage = getCorrectToast();
$_SESSION["score"]++;
} else {
$toastColor = "red";
		$toastMessage = getWrongToast();
}
}
function getFinalMessage($score){
if ($score==10){
return "Wow, you got everything correct!";
} else if ($score >=7){
return "Great work! You're doing well.";
} else if ($score >=5){
return "Just passed! Keep practicing to get better!";
} else {
return "Good effort, but you can do better!";
}
}
| 974419e0fcb147a88cb4c8f19585884935d7b6f6 | [
"PHP"
] | 4 | PHP | abdo2017/php-quiz-app | e149d34ce575406ad8939cbd81fb614259a9c5a1 | 8d5eacad5738d6d8e9e1e22ba4f455460b5bc563 | |
refs/heads/master | <file_sep>package com.example.maneerat002
import android.graphics.drawable.Drawable
import androidx.appcompat.app.AppCompatActivity
import android.os.Bundle
import android.view.View
import android.widget.Toast
import kotlinx.android.synthetic.main.activity_main.*
class MainActivity : AppCompatActivity() {
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_main)
        // Hide the title bar
supportActionBar?.hide()
btn_swit.setOnClickListener(View.OnClickListener {
getDrawable(R.drawable.switzerland)?.let { it1 -> setProvice(it1, getString(R.string.btn_swit),getString(R.string.history_swit)) }
})
btn_sing.setOnClickListener(View.OnClickListener {
getDrawable(R.drawable.singapore)?.let { it1 -> setProvice(it1, getString(R.string.btn_sing),getString(R.string.history_sing)) }
})
btn_fan.setOnClickListener(View.OnClickListener {
getDrawable(R.drawable.france)?.let { it1 -> setProvice(it1, getString(R.string.btn_fan),getString(R.string.history_fan)) }
})
btn_dubai.setOnClickListener(View.OnClickListener {
getDrawable(R.drawable.dubai)?.let { it1 -> setProvice(it1, getString(R.string.btn_dubai),getString(R.string.history_dubai)) }
})
}
fun setProvice (drawable : Drawable, header:String, content:String ){
        img_switza.setImageDrawable(drawable) // change the image
        tv_header.setText("Country information: "+header) // header of the content section
        tv_history.setText(content) // update the description text
        Toast.makeText(this,
            "This is information about "+header, Toast.LENGTH_LONG).show()
}
}
<file_sep>include ':app'
rootProject.name='Maneerat002'
| e8d56e6f053bbd5359b7f0a16539c1cd01e631d0 | [
"Kotlin",
"Gradle"
] | 2 | Kotlin | maneerat002/Maneerat2 | e9369a89f1393f2257fafc2fb9e92fcbc1db3776 | c18efa520443cc8b27f170955a5f9190fc10fced | |
refs/heads/master | <file_sep>package com.company;
public class Do {
public static void main(String[] args) {
int i = 0;
do {
System.out.println(i);
i++;
} while (i < 10);
// this is a little different
// a do-while loop always executes at least one time
// i = 400;
// do {
// System.out.println(i);
// i++;
// }
// while(i > 500);
// do while loops exist in both JS and Java. Again good to practice in Unit 2, but not nearly as important to understand as while loops, or for loops
}
}
| fb71e00c05334c879df5700d5e23e0074f3860ac | [
"Java"
] | 1 | Java | pdmxdd/loops | f16a53d9daa5c4e3413e046e625c33b060c434ba | 74c6a72a2b8cb8f18e0b1dc05a31b90d2f28ec9e | |
refs/heads/master | <file_sep>package com.techpro.javapractice;
//Program to print numbers from 1 to 10 in a single row with one tab space
public class NumbersInSingleRow {
public static void main(String args[]){
for(int i=1; i<=10; i++){
System.out.print( i + "\t");
}
}
}
<file_sep>package com.techpro.javapractice;
// Program to print the month in words, based on the input month number, e.g. input: 12 output: December
import java.awt.print.PrinterException;
public class MonthInWords {
public static void main(String args[]) {
try {
// converting input/Month(String) to integer
int monthNum = Integer.parseInt(args[0]);
// using switch statement Getting the Month in words
switch (monthNum) {
// if the Month number is 1 prints January
case 1:
System.out.println("January");
// if true exits from the loop
break;
// if the Month number is 2 prints February
case 2:
System.out.println("February");
break;
// if the Month number is 3 prints March
case 3:
System.out.println("March");
break;
// if the Month number is 4 prints April
case 4:
System.out.println("April");
break;
// if the Month number is 5 prints May
case 5:
System.out.println("May");
break;
// if the Month number is 6 prints June
case 6:
System.out.println("June");
break;
// if the Month number is 7 prints July
case 7:
System.out.println("July");
break;
// if the Month number is 8 prints August
case 8:
System.out.println("August");
break;
// if the Month number is 9 prints September
case 9:
System.out.println("September");
break;
// if the Month number is 10 prints October
case 10:
System.out.println("October");
break;
// if the Month number is 11 prints November
case 11:
System.out.println("November");
break;
// if the Month number is 12 prints December
case 12:
System.out.println("December");
break;
// If the input is not between 1-12 prints invalid input
default:
throw new PrinterException("Invalid Month");
}
}catch (PrinterException printerException) {
System.out.println(printerException);
}
}
}
<file_sep>package com.techpro.javapractice;
/*an application for employee management having following classes: a)
Create an Employee class with following attributes and behaviors : i) EmpId Int ii)
EmpName String iii) EmpEmail String iv) EmpGender char v) EmpSalary float vi)
GetEmployeeDetails() -> prints employee details
*/
public class Employee {
//Declaration of Employee variable to store employee details
private int empId;
private String empName;
private String empEmail;
private char empGender;
private float empSalary;
// Employee constructor
public Employee() {
}
// Employee Constructor with parameters
public Employee(int empId,String empName,String empEmail,char empGender,float empSalary) {
this.empId = empId;
this.empName = empName;
this.empEmail = empEmail;
this.empGender = empGender;
this.empSalary = empSalary;
}
// Getter and setter methods to access the employee details
public int getEmpId() {
return empId;
}
public void setEmpId(int empId) {
this.empId = empId;
}
public String getEmpName() {
return empName;
}
public void setEmpName(String empName) {
this.empName = empName;
}
public String getEmpEmail() {
return empEmail;
}
public void setEmpEmail(String empEmail) {
this.empEmail = empEmail;
}
public char getEmpGender() {
return empGender;
}
public void setEmpGender(char empGender) {
this.empGender = empGender;
}
public float getEmpSalary() {
return empSalary;
}
public void setEmpSalary(float empSalary) {
this.empSalary = empSalary;
}
// Method to get employee details
public String getEmployeeDetails() {
return "Employee ID:" + empId + "EmployeeName:" + empName + "EmployeeEmail:" + empEmail +
"EmployeeGender:" + empGender + "EmployeeSalary:" + empSalary;
}
// toString Method to return employee object
public String toString() {
return "Employee ID:" + empId + ", EmployeeName:" + empName + ", EmployeeEmail:" + empEmail +
", EmployeeGender:" + empGender + ", EmployeeSalary:" + empSalary;
}
}
<file_sep>package com.techpro.javapractice;
// Program to print prime numbers between 10 and 99.
public class PrimeNumbersBetweenRange {
public static void main (String args[]) {
// loop to get prime numbers between 10-99
System.out.println("Prime Numbers between 10 to 99:");
for (int i = 10; i <= 99; i++) {
// Setting boolean variable to false to check prime
boolean flag = false;
// for each number between 10-99 checks the prime number condition
for (int j = 2; j <= i/2; j++) {
if (i % j == 0) {
// if it is not prime set flag to true
flag = true;
break;
}
}
// if the variable/number in i prime then prints the number
if (!flag){
System.out.println(i);
}
}
}
}
<file_sep>package com.techpro.javapractice;
// program to check if the program has received command line arguments or not
public class CommandLineArguments {
public static void main(String args[]) {
// Storing length of input in a variable (n)
int n = args.length;
// if the input contain data then prints the input by comma separator
if(n > 0) {
// if true print the arguments/input one by one with comma separator until last but one
for (int i=0;i< n-1;i++) {
System.out.print(args[i] + ",");
}
// printing last index data
System.out.print(args[n-1]);
}else
// if there is no input then prints No Values message
System.out.println("No Values");
}
}
<file_sep>package com.techpro.javapractice;
//program to check if a given number is Positive, Negative, or Zero.
import java.util.Scanner;
public class NumberCheck {
public static void main(String args[]){
// Taking input from user
System.out.println("Please enter the number to check Positive/Negative/Zero ");
Scanner in = new Scanner(System.in);
// Storing input in variable n
int n = in.nextInt();
// passing variable n to numberChecker Method to check the number
numberChecker(n);
}
//numberChecker method to check the given number (n) is positive/Negative/Zero
public static void numberChecker(int n) {
/* if the given number is equal to zero prints zero,
if the number is less than zero then prints as Negative
if the number is greater than zero prints as positive number */
if (n == 0) {
System.out.println("The Given number is Zero");
} else if (n < 0) {
System.out.println("The given number is Negative ");
} else if (n > 0) {
System.out.println("The given number is Positive");
} else
System.out.println("The given number is invalid ");
}
}
<file_sep>package com.techpro.javapractice;
/* Program to Create one more class EmployeeDB which has the following methods. i) boolean
addEmployee(Employee e) ii) boolean deleteEmployee(int empId) iii) String
showPaySlip(int empId) iv) Employee[] listAll()*/
import java.util.*;
public class EmployeeDB {
// Creating database variable of employee type of List to store
List<Employee> db = new ArrayList<>();
// Method to add a employee of Employee class type into Arraylist
public boolean addEmployee(Employee e) {
return db.add(e);
}
// Method to delete employee from ArrayList
public boolean deleteEmployee(int empId){
boolean flag = false;
        // Iterator to run through the db ArrayList
Iterator<Employee> iterator = db.iterator();
while (iterator.hasNext()){
// Employee class type instance
Employee employee = iterator.next();
if (employee.getEmpId() == empId) {
flag = true;
iterator.remove();
}
        }
        return flag;
}
// Method to get the salary of empId
public String showPaySlip(int empId) {
// Defining the String type variable to return
String paySlip = "Not a valid empId";
        // Iterator to go through the ArrayList data
Iterator<Employee> iterator = db.iterator();
while (iterator.hasNext()) {
Employee employee = iterator.next();
if (employee.getEmpId() == empId) {
float salary = employee.getEmpSalary();
paySlip = String.valueOf(salary);
}
}
return paySlip;
}
// Method to return array of employees from listarray
public Employee[] listAll() {
Employee[] listallEmpArray = db.toArray(new Employee[0]);
return listallEmpArray;
}
}
<file_sep>package com.techpro.javapractice;
// Program to check if a given number is prime or not
public class PrimeNumberChecker {
public static void main (String args[]) {
try {
// converting input to a integer variable
int numberToCheckPrime = Integer.parseInt(args[0]);
// Initializing boolean variable to false
boolean flag = false;
// If the given number is zero or one
if (numberToCheckPrime == 0 || numberToCheckPrime == 1) {
System.out.println(numberToCheckPrime + " is neither prime nor composite");
return;
}
// loop to check prime number
for (int i = 2; i <= numberToCheckPrime / 2; i++) {
// If the remainder is zero then it is a non prime number
if (numberToCheckPrime % i == 0) {
// Number is not prime so Setting flag to true
flag = true;
                    // Even if this branch is entered once, the number is not prime; break out of the loop
break;
}
}
// if the flag does not change then it is a prime number
if (flag == false) {
System.out.println(numberToCheckPrime + " is Prime Number");
}
            // if the flag was set then it is not a prime
            else
                System.out.println(numberToCheckPrime + " is not a Prime Number");
}catch (NumberFormatException numberFormatException) {
System.out.println(numberFormatException);
}
}
}
<file_sep>package com.techpro.javapractice;
/*Program to print the color name, based on color code. If color code is not valid
*then print "Invalid Code". R->Red, B->Blue, G->Green, O->Orange, Y->Yellow,
*W->White*/
public class ColorCode {
public static void main (String args[]) {
// Initializing COlor code
char colorCode = 'z';
// Switch Statement to get Color for given colorcode
switch (colorCode) {
case 'R':
System.out.println("R -> Red");
break;
case 'B':
System.out.println("B -> Blue");
break;
case 'G':
System.out.println("G -> Green");
break;
case 'O':
System.out.println("O -> Orange");
break;
case 'Y':
System.out.println("Y -> Yellow");
break;
case 'W':
System.out.println("W -> White");
break;
// If the colorcode is not valid prints the default message
default:
System.out.println("Invalid Code");
}
}
}
<file_sep>package com.techpro.javapractice;
// Program to print first 5 values which are divisible by 2, 3, and 5
public class FiveValuesDivisibleBy {
public static void main(String args[]) {
// initializing counter variable to limit the number of value to be printed
int counter = 0;
System.out.println("Values Divisable by 2,3, and 5:");
// loop to get values
for (int i = 2; i <= 300; i++) {
// condition if the int i is divisible by 2,3, and 5 then enter in to the loop
if (i % 2 == 0 && i % 3 == 0 && i % 5 == 0) {
// if divisible then increasing the counter value
counter++;
// printing divisible value
System.out.println(i);
}
// return if 5 values printed
            if (counter >= 5){
                return;
            }
}
}
}
<file_sep>package com.techpro.javapractice;
// Program on an ArrayList of Object that can store values of different types (int, float, double, String, etc.)
import java.util.ArrayList;
import java.util.List;
public class ArrayListToStoreDiffdatatypes {
public static void main(String[] args) {
// creating ArrayList
List<Object> al = new ArrayList<Object>();
// adding data to arraylist
al.add(123);
al.add(11);
al.add(78.48900F);
al.add(3.32938F);
al.add(123.1235678D);
al.add("String");
// Printing arraylist
System.out.println(al);
}
}
<file_sep>package com.techpro.javapractice;
/*Program to accept gender ("Male" or "Female") and age (1-120) from command
line arguments and print the percentage of interest based on the given conditions.
Interest == 8.2% Gender ==> Female Age ==>1 to 58 Interest == 7.6% Gender ==>
Female Age ==>59 -120 Interest == 9.2% Gender ==> Male Age ==>1-60 Interest ==
8.3% Gender ==> Male Age ==>61-120
*/
public class InterestRate {
public static void main (String args[]) {
try {
// Converting second String argument(age) to int
int stringTointAge = Integer.parseInt(args[1]);
// check if the condition is female and age between 1-58 prints the Message
if ((args[0].equals("Female")) && (stringTointAge <= 58 && stringTointAge >= 1)) {
System.out.println("Interest == 8.2% ");
}
// check if the condition is female and age between 59-120 prints the Message
else if ((args[0].equals("Female")) && (stringTointAge <= 120 && stringTointAge >= 59)) {
System.out.println("Interest == 7.6% ");
}
// check if the condition is male and age between 1-60 prints the Message
else if ((args[0].equals("Male")) && (stringTointAge <= 60 && stringTointAge >= 1)) {
System.out.println("Interest == 9.2% ");
}
// check if the condition is male and age between 61-120 prints the Message
else if ((args[0].equals("Male")) && (stringTointAge <= 120 && stringTointAge >= 61)) {
System.out.println("Interest == 8.3%");
} else {
System.out.println("Invalid Input");
}
}catch (Exception e) {
System.out.println(e);
}
}
}
| bca8c558c6d8aa6d07f70cb2c4cc27a17d61ca77 | [
"Java"
] | 12 | Java | svrm1014/JavaProgrammingSession | 4ad3c6aa6f48ab428dce20c905a35ce28ae1dda6 | d8c9a1bcd45aae7c682ab0e3a2f6fd3428c14b76 | |
refs/heads/master | <repo_name>Eirik-Nicolai/thread_testimg<file_sep>/src/main.rs
use std::sync::mpsc;
use std::thread;
use std::time::Duration;
fn main() {
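    // mpsc = multi-producer, single-consumer: tx is cloned below so both spawned threads can send into the same rx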
let (tx, rx) = mpsc::channel();
let tx2 = tx.clone();
thread::spawn( move || {
let vals = [
String::from("hi"),
String::from("from"),
String::from("beyond"),
];
for val in vals {
tx.send(val).unwrap();
thread::sleep(Duration::from_millis(1000));
}
});
thread::spawn( move || {
let vals = [
String::from("hi"),
String::from("again"),
String::from("hello"),
];
for val in vals {
tx2.send(val).unwrap();
thread::sleep(Duration::from_millis(1000));
}
});
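    // the receiving loop below ends once every sender (tx and tx2) has been dropped by its thread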
for recvd in rx {
println!("Received {}", recvd);
}
}
| edabfbf6cc898bd5470cfe73229bae7a8c2e0c02 | [
"Rust"
] | 1 | Rust | Eirik-Nicolai/thread_testimg | 5afab3a5a09fd90f01138513cdc02083b15339d4 | 2f5ea0b5f6ce10db4f4480d1cdb6035383199836 | |
refs/heads/master | <repo_name>HunterCharlesHewitt/Crouts-Algorithm<file_sep>/machine5.py
import matplotlib.pyplot as plt
import numpy as np
import math
def crouts_alg(mid_diag,top_diag,bottom_diag,solution,n):
x = []
for i in range(0,n):
x.append(0)
lower = []
upper = []
lower_i1 = [0]
z = []
lower.append(mid_diag[0])
    upper.append(top_diag[0]/lower[0])  # first superdiagonal entry of U comes from the top diagonal, matching upper[i] in the loop below
z.append(solution[0]/lower[0])
for i in range(1,n-1):
lower_i1.append(bottom_diag[i])
lower.append(mid_diag[i] - ((lower_i1[i])*upper[i-1]))
upper.append(top_diag[i]/lower[i])
z.append((solution[i] - bottom_diag[i]*z[i-1])/lower[i])
lower_i1.append(bottom_diag[n-1])
lower.append(mid_diag[n-1] - (lower_i1[n-1]*upper[n-2]))
z.append((solution[n-1] - (lower_i1[n-1]*z[n-2]))/lower[n-1])
x[n-1] = z[n-1]
#for i in range(0,n-1):
# print("u: ", i, " ", upper[i])
for i in range(n-2,-1,-1):
x[i] = (z[i] - (upper[i]*x[i+1]))
# print(x[i])
return x
def make_array(val,n):
array = []
for i in range(0,n):
array.append(val)
return array
def main():
a1 = make_array(4,16) #array a_i = 4
b1 = make_array(1,16) #array b_i = 1
c1 = make_array(1,16) #array c_i = 1
f1 = make_array(1,16) #array f_i = 1
x1 = crouts_alg(a1,b1,c1,f1,16)
a2 = make_array(2014,16) #array a_i = 4
b2 = make_array(4,16) #array b_i = 1
c2 = make_array(10,16) #array c_i = 1
f2 = make_array(14,16) #array f_i = 1
x2 = crouts_alg(a2,b2,c2,f2,16)
for i in range(0,len(x1)):
print("case 1 x",i,": ", x1[i])
print()
for i in range(0,len(x2)):
print("case 2 x",i,": ", x2[i])
if __name__ == "__main__":
main() | 26d75a1facf8c301c4177ef53c6277712b45e77c | [
"Python"
] | 1 | Python | HunterCharlesHewitt/Crouts-Algorithm | 39a094c3468d944c4bf97f1f6fd6aeb553d85164 | 72ec863388b581eccb9e000594eb255c46690228 | |
refs/heads/master | <repo_name>wtachau/IOU<file_sep>/iou.py
class User:
    # a user has an id, a list of friends, and a list of IOU tickets
    def __init__(self, user_id, friends, tickets):
        self.user_id = user_id
        self.friends = friends
        self.tickets = tickets
    def get_balance(self):
        balance = 0
        for ticket in self.tickets:
            balance = balance + ticket.get_value()
        return balance
class Ticket:
    def __init__(self, id, label, value, date, user, active):
        self.id = id
        self.label = label
        self.value = value
        self.date = date
        self.user = user
        self.active = active
    def get_value(self):
        return self.value
    def get_date(self):
        return self.date
    def get_label(self):
        return self.label
    def get_user(self):
        return self.user
class Label:
    def __init__(self, image, title, description):
        self.image = image
        self.title = title
        self.description = description
    def get_image(self):
        return self.image
    def get_title(self):
        return self.title
    def get_description(self):
        return self.description
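# A minimal usage sketch (hypothetical values, purely illustrative):
if __name__ == "__main__":
    groceries = Label("groceries.png", "Groceries", "Weekly shopping run")
    ticket = Ticket(1, groceries, 12.50, "2014-01-15", "alice", True)
    alice = User("alice", ["bob"], [ticket])
    print(alice.get_balance())  # -> 12.5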
# end of program
<file_sep>/db.py
from flask import (
Flask,
render_template,
session,
redirect,
request,
url_for
)
from mongokit import Connection, Document, ObjectId
import datetime
import json
import requests
import urllib
MONGODB_HOST = 'localhost'
MONGODB_PORT = 27017
DEBUG = True
SECRET_KEY = 'development key'
names = ''
app = Flask(__name__)
app.config.from_object(__name__)
connection = Connection(app.config['MONGODB_HOST'], app.config['MONGODB_PORT'])
class Entry(Document):
use_dot_notation = True
__collection__='entry'
__database__='main'
structure = {
'nameAndIDOfOwed': (basestring, int),
'ticketAmount' : int,
'ticketType' : basestring,
'ticketDate' : datetime.datetime,
'ticketMessage' : basestring,
'ticketActive' : bool,
'nameAndIDOfOwers' : [(basestring, int)],
}
default_values= {'ticketDate' : datetime.datetime.utcnow}
def id(self):
return self._id
connection.register([Entry])
connection.main.entry.Entry()
def new_entry(name, pin):
new_entry=connection.main.entry.Entry()
new_entry.nameAndIDOfOwed = (name, pin)
#entries.add(new_entry)
new_entry.save()
new_entry("Alana", 0)
new_entry("Jessie", 1)
@app.route('/')
def hello():
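    # builds a newline-separated list of the stored names (with an index), then clears the collection; throwaway test behavior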
names_before = ''
i = 0
for item in connection.main.entry.find():
names_before += item['nameAndIDOfOwed'][0] + str(i) +"\n"
i+=1
connection.main.entry.remove()
return names_before
if __name__=="__main__":
app.run(debug=True)
<file_sep>/views.py
@app.route('/')
def index():
return render_template('index.html')
@app.route('/login')
def login():
return render_template('login.html')
"""@app.route('/profile')
@login_required
def profile():
return render_template(
'profile.html',
content='Profile Page',
facebook_conn=social.facebook.get_connection())"""<file_sep>/app.py
from flask import (
Flask,
render_template,
session,
redirect,
request,
url_for
)
from mongokit import Connection, Document, ObjectId
import datetime
from oauth import sign_url
from flask_oauth import OAuth
import json
import requests
import urllib
MONGODB_HOST = 'localhost'
MONGODB_PORT = 27017
DEBUG = True
SECRET_KEY = 'development key'
app = Flask(__name__)
app.config.from_object(__name__)
connection = Connection(app.config['MONGODB_HOST'], app.config['MONGODB_PORT'])
personCollection = connection['personEntry'].entries
ticketCollection = connection['ticketEntry'].entries
class User(Document):
    use_dot_notation = True
    __collection__='user'
    __database__='main'
    structure = {
        'name': basestring,
        'created_at': datetime.datetime,
        'email': basestring,
        'password': <PASSWORD>,
    }
    default_values = {'created_at': datetime.datetime.utcnow}
    def id(self):
        return self._id
    def __repr__(self):
        return '<Entry %s>' % self['name']
class Ticket(Document):
    use_dot_notation = True
    __collection__='ticket'
    __database__='main'
    structure = {
        'nameAndIDOfOwed': (basestring, int),
        'ticketAmount' : int,
        #'ticketType' : basestring,
        'ticketDate' : datetime.datetime,
        'ticketMessage' : basestring,
        'ticketActive' : bool,
        'nameAndIDOfOwers' : [(basestring, int)],
    }
    default_values= {'ticketDate' : datetime.datetime.utcnow, 'ticketActive' : True}
    def id(self):
        return self._id
connection.register([Ticket])
connection.register([User])
connection.main.entry.Ticket()
connection.main.entry.User()
current = User()
oauth = OAuth()
@app.route('/')
def login():
#return facebook.authorize(callback=url_for('facebook_authorized',
# next=request.args.get('next') or request.referrer or None,
# _external=True))
alreadylogged = False;
if 'username' in session:
alreadylogged = True;
print "Already logged in as %s" % session['username']
return render_template('login.html', islogged = alreadylogged)
@app.route('/home')
def home():
if (session['logged_in']):
print session['username']
return render_template('index.html')
else:
print "NOT LOGGED IN!"
return render_template('login.html')
@app.route('/make_ticket', methods=['GET', 'POST'])
def make_ticket():
if (session['logged_in']):
if request.method=='POST':
new_ticket = connection.main.ticketCollection.Ticket()
new_ticket.nameAndIDOfOwed= (session['username'], 0)
new_ticket.nameAndIDOfOwers = [(request.form['friend_name'], 0)]
if(request.form['direction']=='negative'):
new_ticket.ticketAmount = -(int(request.form['amount']))
else:
new_ticket.ticketAmount = int(request.form['amount'])
new_ticket.ticketMessage = request.form['message']
new_ticket.save()
return redirect(url_for('profile'))
else:
return render_template('makeTix.html')
else:
print "NOT LOGGED IN!"
return render_template('login.html', islogged=False)
@app.route('/profile')
def profile():
if (session['logged_in']):
print session['username']
ticket_list = []
balance=0
for item in connection.main.ticketCollection.find():
if session['username'] == item['nameAndIDOfOwed'][0]:
balance+=item['ticketAmount']
printed = "Name of Friend: " + item['nameAndIDOfOwers'][0][0] + ". Amount: $" + str(item['ticketAmount']) + ".\n\"" + item['ticketMessage']+ "\"\n"
ticket_list.append(printed)
elif(item['nameAndIDOfOwers'][0]==session['username']):
balance-=item['ticketAmount']
return render_template('profile.html', tickets=ticket_list, balance=balance)
else:
print "NOT LOGGED IN!"
return render_template('login.html', islogged=False)
@app.route('/profile', methods=['POST'])
def save_entry():
#new_entry = personCollection.User()
new_entry = connection.main.personCollection.User()
new_entry.name = request.form['user_name']
new_entry.url = request.form['email']
new_entry.phone_number = request.form['password']
new_entry.save()
@app.route('/logout', methods=['POST'])
def logout():
print"logging out"
session.pop('username', None)
session['logged_in'] = False
return render_template('login.html')
@app.route('/loginattempt', methods=['GET', 'POST'])
def trylogin():
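    # NOTE: despite its name, this handler registers a brand-new User from the form fields and marks the session as logged in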
print "here"
error = None
if request.method == 'POST':
print('You were logged in')
new_entry = connection.main.personCollection.User()
new_entry.name = request.form['user_name']
new_entry.url = request.form['email']
new_entry.phone_number = request.form['password']
new_entry.save()
session['logged_in'] = True
session['username'] = new_entry.name
#for item in connection.main.personCollection.find():
#print item['name']
return redirect(url_for('home'))
def get_tickets():
for item in connection.main.ticketCollection.Ticket():
print item
"""# We'll need a user class, but how?
class User(db.Model):
User Model Class
id = db.StringProperty(required=True) #facebook user-id
created = db.DateTimeProperty(auto_now_add=True)
updated = db.DateTimeProperty(auto_now=True)
name = db.StringProperty(required=True)
profile_url = db.StringProperty(required=True)
access_token = db.StringProperty(required=True) #fb OAUTH access token"""
if __name__ == '__main__':
app.run(debug=True)
| 4f54103ae7140626c522c0d7469af0e836fda893 | [
"Python"
] | 4 | Python | wtachau/IOU | fc0ac98f47e9f2680bb3158d341eaed2ab2ca4eb | 78162d1a7fa12d8d0b1a50a49cd366fe82ce7cc3 | |
refs/heads/master | <file_sep>interface Edicion {
editorial: string;
fecha: number;
}
class LibroBasico implements Edicion {
public id: number | string;
public editorial;
public fecha;
public autor: string;
public titulo: string;
constructor(autor, titulo) {
this.autor = autor;
this.titulo = titulo;
}
}
class LibroTecnico extends LibroBasico {
public tema: string;
constructor(autor, titulo, tema) {
super(autor, titulo);
this.tema = tema;
}
mostrar() {
console.log(this);
}
}
const oLibroT = new LibroTecnico('Pepito', 'Angular Facil', 'Programacion');
oLibroT.mostrar();
<file_sep>interface Edicion {
editorial: string;
fecha: number;
}
class Libro implements Edicion {
public id: number | string;
public editorial;
public fecha;
constructor(public autor: string, public titulo: string) {}
}
const oLibro = new Libro('J.Pérez', 'Angular Facil');
// Since the class has no methods,
// it is possible to create object literals that satisfy the type
let oLibro2: Libro = {
id: '',
autor: '',
titulo: '',
editorial: '',
fecha: 0};
// Interfaces can also be used to define types
let oEdicion: Edicion = {
editorial: '',
fecha: 0};
<file_sep>let aDatos = [{precio : 22}, {precio : 34}, {precio : 57} ]
calcularPrecios(12)
// calcularPrecios()
function calcularPrecios (pIva= 22) {
{
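// inner block: nIva and precioFinal are declared with let, so they are scoped to this block and are not visible outside it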
let nIva = 1 + (pIva/100)
let precioFinal
aDatos.forEach( elem => {
precioFinal = elem.precio * nIva
mostrarPrecio ( precioFinal)
})
console.log('El IVA aplicado ha sido ',nIva)
}
// console.log(nIva)
}
function mostrarPrecio (precioFinal) {
console.log(
`
El precio final es:
${precioFinal.toFixed(2)}`
)
}
| fff2e9cc7703196059ed8ae7381982e5fb2a8ffa | [
"JavaScript",
"TypeScript"
] | 3 | TypeScript | Alce-Angular-Courses/curso-kc | 2be778cbe17c30b1a20946956bedd800183805bd | 4cea48ec8c72592b3856f8ec821471c0a7c5f8b6 | |
refs/heads/master | <file_sep>
$(function(){
	// runs immediately when the JS file is loaded
user.init();
});
// remove once the DB is connected
var users = [];
var currentTime = new Date();
users.push({
email : '1',
password : '1',
name : '1',
job : '1',
joinDate : currentTime,
updateDate : currentTime
});
users.push({
email : '2',
password : '2',
name : '2',
job : '2',
joinDate : currentTime,
updateDate : currentTime
});
var user = {
$el : {},
init : function(){
this.$el = $('.container');
this.$el.find('#btnSignUp').click(function(){
user.showModal();
});
this.$el.find('#btnClose').click(function(){
user.closeModal();
});
this.$el.find('#btnSubmit').click(function(){
user.signUp();
});
this.$el.find('#btnLogin').click(function(){
user.login();
});
},
showModal : function(){
this.resetModal();
this.$el.find('#userModal').modal();
},
closeModal : function(){
this.$el.find('#userModal').modal('hide');
},
resetModal : function(){
this.$el.find('.signForms').val('');
},
signUp : function(){
var email = this.$el.find('#inputEmail').val(),
password = this.$el.find('#inputPassword').val(),
passwordConfirm = this.$el.find('#inputPasswordConfirm').val(),
name = this.$el.find('#inputName').val(),
job = this.$el.find('#inputJob').val();
		// 1. Are any of the input fields empty?
		//    If a field is empty, highlight it by adding the 'empty' class
		//
		//    then either save, or show a failure alert
if(!this.validate()){
alert("모든 항목을 입력해주세요");
return;
}
		// 2. Do password and passwordConfirm match? If not, show a password-confirmation alert
if(password !== passwordConfirm){
alert("같은 패스워드를 입력해주세요");
return;
}
		// 3. Is this user already registered?
		// the find function checks whether the email is already taken: true if a matching email exists, false otherwise
this.save({
email : email,
password : <PASSWORD>,
name : name,
job : job,
joinDate : currentTime,
updateDate : currentTime
});
},
validate : function(){
var $signForms = this.$el.find('.signForms'),
result = true;
$.each($signForms, function(index, signForm){
			var $signForm = $(signForm); // wrap with $ so jQuery methods can be used
if(!$signForm.val()){
$signForm.addClass('empty');
result = false;
}else{
$signForm.removeClass('empty');
}
});
return result;
},
	// modify when the DB is connected
	// currently unused
find : function(obj){
var result;
var _ = this;
$.ajax({
method: 'POST',
url: 'email',
data: obj,
dataType: 'json',
success: function(data){
alert(data.status);
if(!data.status){
_.save(obj);
_.closeModal();
}else{
alert('이미 가입된 사용자입니다.');
}
}
});
},
	// modify when the DB is connected
save : function(obj){
var _ = this;
$.ajax({
method : 'POST',
url : 'user',
data : obj,
dataType : 'json',
success : function(data){
if(data.status){
alert('등록 되었습니다.');
_.closeModal();
}else{
alert('이미 가입된 사용자입니다.');
}
}
});
},
	// check the email and password entered in the login form; if they match, show a login alert
	// otherwise show an email & password confirmation alert
login : function(){
var email = this.$el.find('#loginEmail').val(),
password = this.$el.find('#loginPassword').val();
		/*
			1. Send the email and password via ajax
			2. If the user exists && the password matches, alert (success)
			 - otherwise alert (asking them to double-check)
		*/
$.ajax({
method : 'POST',
url : 'login',
data : {
email : email,
password : <PASSWORD>
},
dataType : 'json',
success : function(data){
if(data.status){
location.href=location.origin+"/board/list";
}else{
alert('이메일과 비밀번호를 확인해주세요.');
}
}
});
}
}<file_sep># Period : 2015.07.11 ~
# Time : Every Saturday 9:00 ~ 12:00
# Location : Wing Study, Sinchon Station
# Textbook : Responsive website development using jQuery & Bootstrap
# Members :
- 이동욱 (organizer)
- 이바우
- 김태영
- 전옥현
- 신윤아
<file_sep>$(function(){
print();
});
var print = function(){
$('#target').keyup(function(){
$('#printArea').text($('#target').val());
});
} | 3fe3dc9ed5dce16d826ff7851473ecfc2365e91e | [
"JavaScript",
"Markdown"
] | 3 | JavaScript | tykim89/jquery-study | 63b544505347e48f86dcfa46003daa69fd24d172 | 5d139110da5d62d16f1f0023dae1ffd8f8b2cbaa | |
refs/heads/master | <repo_name>softwaregravy/sns_endpoint<file_sep>/lib/sns_endpoint.rb
require "rubygems"
require 'bundler/setup'
require "sns_endpoint/version"
require 'sinatra'
require 'json'
require 'message'
require 'httparty' # HTTParty is called below to confirm SNS subscriptions; requiring it here makes the dependency explicit
module SnsEndpoint
class << self
attr_accessor :topics_list, :subscribe_proc, :message_proc
end
def self.setup(&block)
yield self
end
class Core < Sinatra::Base
post '/' do
json = JSON.parse(request.body.read)
sns = SnsEndpoint::AWS::SNS::Message.new json
if sns.authentic?
if sns.type == :SubscriptionConfirmation
if SnsEndpoint.topics_list.include? sns.topic_arn
HTTParty.get sns.subscribe_url
SnsEndpoint.subscribe_proc.call(json)
end
elsif sns.type == :Notification
SnsEndpoint.message_proc.call(json)
end
end
end
end
end
<file_sep>/README.md
# SnsEndpoint
A simple gem containing a Sinatra engine, designed to help you consume Amazon SNS HTTP POST notifications.
Use inside Rails (as an engine) or outside - as a standalone application.
## Installation
Add this line to your application's Gemfile:
gem 'sns_endpoint'
And then execute:
$ bundle
Or install it yourself as:
$ gem install sns_endpoint
## Usage
Configure using SnsEndpoint.setup, providing block, like this:
```ruby
SnsEndpoint.setup do |config|
config.topics_list = ['first_topic', 'second_topic'] #list of topics that endpoint should respond to subscription request
config.subscribe_proc = Proc.new { |message| p message } #proc that should be executed when subscribe request got received and responded to, passed argument is message (json object)
config.message_proc = Proc.new { |message| p message } #proc that should be executed when message got send to endpoint
end
```
for example in Rails initializer.
Use as Rails engine:
* mount in routes.rb:
```ruby
mount SnsEndpoint::Core => "/sns_endpoint"
```
Use as a standalone app:
Example script:
```ruby
require 'rubygems'
require 'sns_endpoint'
SnsEndpoint.setup do |config|
config.topics_list = ['first_topic', 'second_topic'] #list of topics that endpoint should respond to subscription request
config.subscribe_proc = Proc.new { |message| p message } #proc that should be executed when subscribe request got received and responded to, passed argument is message (json object)
config.message_proc = Proc.new { |message| p message } #proc that should be executed when message got send to endpoint
end
SnsEndpoint::Core.run!
```
## Contributing
1. Fork it
2. Create your feature branch (`git checkout -b my-new-feature`)
3. Commit your changes (`git commit -am 'Added some feature'`)
4. Push to the branch (`git push origin my-new-feature`)
5. Create new Pull Request
| c5ef34e1fbf1d1a8f93ad0064fb88c25eea59510 | [
"Markdown",
"Ruby"
] | 2 | Ruby | softwaregravy/sns_endpoint | e4795b71f5f32cff8decff3a3ae660f1523c204c | 8cb7c758b3833002c426e1655965bd4bd4988a9f | |
refs/heads/master | <file_sep>---
title: "03-EJ-manipulacion"
output: html_document
---
En esta sección trabajaremos con el paquete dplyr para modificar datos. Usaremos los el dataset decisiones Perú.
Los cargamos en este bloque de código
```{r}
library(tidyverse)
decisiones <- read_csv("data/decisiones_asilo_peru.csv")
```
## Tu turno 1
Te dieron una tabla con datos de temperatura mínima y máxima para distintas estaciones meteorológicas de todo el país durante los 365 días de un año. Las columnas son: id_estacion, temperatura_maxima, temperatura_minima y provincia. En base a esos datos, te piden que computes la temperatura media anual de cada estación únicamente de las estaciones de Cusco.
¿En qué orden ejecutarías estos pasos para obtener el resultado deseado?
- usar `summarise()` para calcular la estadística mean(temperatura_media) para cada id_estacion
- usar `group_by()` para agrupar por la columna id_estacion
- usar `mutate()` para agregar una columna llamada temperatura_media que sea
`(temperatura_minima + temperatura_maxima)/2`
- usar `filter()` para seleccionar solo las filas donde la columnas provincia es igual a "Cusco"
## Tu turno 2: verbos `select` y `filter`
1. Seleccionar solo las variables Anio, `Codigo Pais Origen`
2. Filtrar las entradas de los países Turquía y Argentina
```{r}
decisiones_2 <- select(decisiones, c(Anio, `Codigo Pais Origen`))
# la barra vertical es un "o"
decisiones_3 <- filter(decisiones, `Codigo Pais Origen` == "ARG" | `Codigo Pais Origen` == "TUR")
```
## Tu turno 3: verbo `mutate`
1. Generar una nueva columna que indique para cada fila si fueron rechazados mas pedidos que los aceptados
2. Generar una nueva columna que indique para cada fila si el tipo de procedimiento es UNHCR
3. Generar una nueva columna con la cantidad de pedidos rechazados o con proteccion complementaria
```{r}
decisiones_ind_1 <- mutate(decisiones, indicador = as.numeric(Rechazadas > Reconocidas))
decisiones_ind_2 <- mutate(decisiones, indicador = `Nombre del Procedimiento` == "UNHCR")
decisiones_ind_3 <- mutate(decisiones, indicador = Rechazadas + `Proteccion Complementaria`)
```
## Tu turno 4: verbos `dplyr`
1. Obtener una tabla con la cantidad de pedidos de asilo rechazados por Perú durante cada año
2. Ordena esta tabla de mayor a menor según la cantidad de pedidos rechazados
3. Que cantidad de pedidos fueron rechazados en total durante el período estudiado?
```{r}
decisiones_rechazados_peru <- decisiones %>%
group_by(Anio) %>%
summarise(total_anio_rech = sum(Rechazadas)) %>%
arrange(desc(total_anio_rech))
total_rechazadas <- decisiones %>%
summarise(total = sum(Rechazadas))
```
<file_sep>2 + 2
sqrt(2)
# creando variables
raiz_dos <- sqrt(2)
mi_primera_variable <- sqrt(2)
# los variables no se actualizan automaticamente como en Excel
x <- 2
y <- x + 2
x<-3
y <- x + 2
vector1 <- c(3, 5, 2)
vector1
vector2 <- 5:50
vector2
mi_media <- mean(vector2)
vector1 <- c(3, 5, 6.1, 7)
vector1[2]
vector1[4]
vector1[-4]
vector7 <- c(2, "hola")
vector7
lista1 <- list(2, "hola")
mi_agenda <- list(Carlos = c(tel = 41232342),
Maria = c(tel = 25362819, edad = 25))
mi_agenda$Carlos
library("readr")
install.packages("readr")
library("tidyverse")
?sum
Vector1 # error
# puedo especificar el paquete al que pertenece la funcion
stats::filter()
dplyr::filter()
filter()
# hola soy violeta
mes <- 1:30
library(readr)
decisiones_asilo_peru <- read_csv("data/decisiones_asilo_peru.csv")
View(decisiones_asilo_peru)
str(decisiones_asilo_peru)
<file_sep>---
title: "R Markdown"
output: html_document
---
Esto es un archivo RMarkdown. Contiene texto plano intercalado con bloques de código grises. Puedes usar este archivo para tomar notas y correr código. Por ejemplo, puedes escribir tu nombre en la línea de abajo (línea 7). Pruébalo:
```{r}
# Puedes escribir comentarios en los bloques de código.
# Dentro del código, "#" significa que empieza un comentario
# y lo que viene luego de "#" no se ejecuta.
# Este bloque usa la función plot (que ya viene en R)
# y el dataset cars para hacer un gráfico.
# Para correr el código, haz click en el botón
# play verde, arriba a la derecha del bloque. Pruébalo
plot(cars)
```
Buen trabajo! Los resultados del código aparecen justo luego del bloque. Puedes presionar x sobre los resultados para cerrarlos.
# Agregar bloques
Para insertar un nuevo bloque, presiona el botón *Insert* en la parte superior de este documento hacia la derecha, luego selecciona *R*. R Markdown agregará un bloque vacío en donde se encuentre el cursor.
Prueba insertar un nuevo bloque a continuación:
Buen trabajo! Durante el taller ubicaremos todos los códigos R dentro de bloques de código.
```{r}
# A veces, querrás correr solo parte del código
# de un bloque. Para eso, selecciona el código que quieras
# correr y presiona Ctrl+Enter (o Command si Mac)
# Si no seleccionas ningún código, R correrá la parte del
# código en la que se encuentre el cursor. Pruébalo
# Corre mean(1:5) pero no la línea que se encuentra debajo.
mean(1:5)
warning("No debes correr esto!")
```
```{r}
# Puedes presionar la flecha apuntando hacia abajo para correr
# todos los bloques anteriores a este. Es útil para cuando en
# el bloque actual usas variables definidas en otros bloques.
# Sys.Date()
```
En el bloque anterior solo hay comentarios. Por eso, si lo corres nada pasará.
Remueve el `#` de la ultima linea del bloque anterior y corre de nuevo el bloque. Puedes decir que hace la función `Sys.Date()`?
# Formateo de texto
Prueba un poco de formateo. Escribe a continuación una lista de ingredientes de tu postre favorito. Resalta con negritas los ingredientes más importantes.
# Reports
Cuando presionas el botón `knit` en la parte superior del archivo RMarkdown, R Markdown genera un output pulido de tu reporte. RStudio abrirá una copia del reporte por ti. Por ahora, solo trabajaremos con archivos HTML. Prueba presionar *Knit*!
Buen trabajo!
# Paquetes de R
Descomenta (borra el `#` del principio del bloque) la línea de código del siguiente bloque. Presiona *Knit* nuevamente
```{r}
#ggplot(data = diamonds) + geom_point(aes(x = carat, y = price))
```
Obtenemos un error. Por que crees que se produce?<file_sep>---
title: "Manipulacion I"
date: "`r Sys.Date()`"
output:
xaringan::moon_reader:
css: ["default", "default-fonts", "custom.css"]
mathjax: "https://cdn.bootcss.com/mathjax/2.7.1/MathJax.js?config=TeX-MML-AM_HTMLorMML"
lib_dir: libs
seal: false
nature:
ratio: 13:9
highlightStyle: github
highlightLines: true
countIncrementalSlides: false
---
```{r setup, include=FALSE}
options(htmltools.dir.version = FALSE)
knitr::opts_chunk$set(fig.height=5)
library(ggplot2)
library(readr)
library(here)
library(dplyr)
library(colorspace)
paleta <- rainbow_hcl(10)
print_it_rainbow <- function(palabra, paleta) {
i_font = 1
for (char in palabra) {
if (char != " ") {
cat(paste0("<font style='color:", paleta[i_font] , "'>",char,"</font>"))
i_font = i_font %% length(paleta) + 1
} else {
cat(" ")
}
}
}
```
background-color: #272822
<br>
<br>
<br>
<br>
<br>
<br>
```{r, results='asis', echo=FALSE}
cat("<h1 style='color:white'> <center> R para contextos humanitarios de emergencia")
cat("</center></h1> \n")
cat("## <center>")
palabra = unlist(strsplit("Manipulación de datos I", split = ""))
cat(print_it_rainbow(palabra, paleta))
cat("</center> \n")
cat("### <center>")
fecha = as.character(Sys.Date())
palabra = unlist(strsplit("<NAME>", split = ""))
cat(print_it_rainbow(palabra, paleta))
cat("</center>")
```
---
background-color: #272822
<br>
<br>
<br>
<br>
<br>
<br>
```{r, results='asis', echo=FALSE}
cat("<h1 style='color:white'> <center> Abrir el archivo 03-EJ-manipulacion.Rmd")
cat("</center></h1> \n")
cat("## <center>")
palabra = unlist(strsplit("Para ir haciendo los EJ", split = ""))
cat(print_it_rainbow(palabra, paleta))
cat("</center> \n")
```
---
<div class="my-header"></div>
## Manipulación de datos
El paquete `dplyr` provee una enorme cantidad de funciones útiles para manipular datos
<center> <img src="img/hex-dplyr.png" alt="Hex stickers" height="100"></center>
Las funciones más comunes:
- `select()`: selecciona columnas de una tabla
- `filter()` y `slice()`: selecciona (o filtra) filas de una tabla
- `mutate()`: agrega nuevas columnas a una tabla
- `arrange()`: ordena las filas según los valores de una columna
- `group_by()`: agrupa una tabla en base al valor de una o más columnas
- `summarise()`: calcula estadísticas para cada grupo de una tabla.
---
<div class="my-header"></div>
## `dplyr` y tablas dinámicas
A rasgos generales, las operaciones de `dplyr` permiten hacer lo que se hace en tablas dinámicas (pivot tables) en Excel.
.pull-left[
Funcion de `dplyr`
- `filter()`
- `group_by()`
- `select()`
- `summarise()`
]
.pull-right[
Sección de tabla dinámica
- "Filtros"
- "Filas"
- "Columnas"
- "Valores"
]
---
<div class="my-header"></div>
## Tu turno 1
Te dieron una tabla con datos de temperatura mínima y máxima para distintas estaciones meteorológicas de todo el país durante los 365 días de un año. Las columnas son: id_estacion, temperatura_maxima, temperatura_minima y provincia. En base a esos datos, te piden que computes la temperatura media anual de cada estación únicamente de las estaciones de Cusco.
¿En qué orden ejecutarías estos pasos para obtener el resultado deseado?
- usar `summarise()` para calcular la estadística mean(temperatura_media) para cada id_estacion
- usar `group_by()` para agrupar por la columna id_estacion
- usar `mutate()` para agregar una columna llamada temperatura_media que sea
`(temperatura_minima + temperatura_maxima)/2`
- usar `filter()` para seleccionar solo las filas donde la columnas provincia es igual a "Cusco"
---
<div class="my-header"></div>
## Los datos
Retomamos el dataset sobre decisiones de Perú frente a los pedidos de asilo
(es una traducción y simplificación del dataset de UNHCR sobre decisiones de Perú: [https://data.humdata.org/dataset/unhcr-population-data-for-per](https://data.humdata.org/dataset/unhcr-population-data-for-per))
```{r, message=FALSE}
decisiones <- read_csv("data/decisiones_asilo_peru.csv")
decisiones
```
---
<div class="my-header"></div>
## Seleccionando columnas: función `select()`
Si nos interesan menos columnas podemos usar `select()` así:
> `select(dataset, c(columna1, columna4))`
o por número de columnas `select(dataset, c(1, 4))`. También podemos excluir un conjunto de columnas:
> `select(dataset, -c(columna1, columna2))`
Supongamos que solo nos interesan: año, país de origen, decisión tomada(4)
Entonces redefinimos el dataset de la siguiente forma
```{r,}
decisiones <- select(decisiones,
c(Anio, `Codigo Pais Origen`, Reconocidas, Rechazadas,
`Proteccion Complementaria`, `Cerradas de otra forma`))
decisiones
```
---
<div class="my-header"></div>
## Seleccionando filas: funcion `filter()`
Supongamos que solo nos interesan los pedidos provenientes de Venezuela
Podemos usar la función filter mas una condición lógica
> filter(dataset, condicion)
En este caso seria:
```{r}
decisiones_venezuela <- filter(decisiones, `Codigo Pais Origen`=="VEN")
decisiones_venezuela
```
---
<div class="my-header"></div>
## funcion `filter()`: condiciones
Las condiciones para `filter()` pueden expresarse en función de una columna del dataset o de un vector de longitud igual a la cantidad de filas del dataset.
Si quiero las decisiones tomadas después de 2010:
```{r}
decisiones_despues_2010 <- filter(decisiones, Anio > 2010)
```
Si quiero cualquier decisión menos las de los años 2010 y 2015:
```{r}
decisiones_2010_2015 <- filter(decisiones, !(Anio %in% c(2010, 2015)))
```
Si quiero solo las entradas de donde se rechazaron más que las que se aceptaron:
```{r}
decisiones_mas_rechazos <- filter(decisiones, Rechazadas > Reconocidas)
```
---
<div class="my-header"></div>
## Tu turno 2: verbos `select` y `filter`
1. Seleccionar solo las variables Anio, `Codigo Pais Origen `
2. Filtrar las entradas de los países Turquía y Argentina
---
<div class="my-header"></div>
## Creando nuevas variables: función `mutate()`
<center><img src="img/dplyr_mutate_es.png" alt="Hex stickers" height="450"></center>
---
<div class="my-header"></div>
## Creando nuevas variables: función `mutate()`
- Queremos agregar a `decisiones` una columna con el total de pedidos de asilo correspondientes a cada fila (mas allá de la decisión tomada).
- Podemos definirla como la suma de las posibles decisiones
```{r}
decisiones <- mutate(decisiones,
total = Reconocidas +
`Proteccion Complementaria` +
`Cerradas de otra forma` +
Rechazadas)
#View(decisiones)
```
---
<div class="my-header"></div>
## Tu turno 3: verbo `mutate`
1. Generar una nueva columna que indique para cada fila si fueron rechazados mas pedidos que los aceptados
2. Generar una nueva columna que indique para cada fila si el tipo de procedimiento es UNHCR
3. Generar una nueva columna con la cantidad de pedidos rechazados o con proteccion complementaria
---
<div class="my-header"></div>
## Ordenando los datos según valores: función `arrange()`
Si quiero ordenar una tabla según los valores de una o más variables puedo
> `arrange(dataset, columna)`
Por ejemplo, si quiero saber cuales fueron los años y países con mas pedidos de asilo
```{r}
arrange(decisiones, desc(Reconocidas))
```
---
<div class="my-header"></div>
## Combinando funciones
Queremos:
> Una tabla de las decisiones tomadas por Perú frente a pedidos de asilo, ordenada por la cantidad de total de pedidos por país cada año.
Podemos obtenerla así:
```{r}
decisiones_venezuela <- filter(decisiones, `Codigo Pais Origen` == "VEN")
decisiones_venezuela <- mutate(decisiones_venezuela,
total = Reconocidas +
`Proteccion Complementaria` +
`Cerradas de otra forma` + Rechazadas)
decisiones_venezuela <- arrange(decisiones_venezuela, desc(total))
```
Pero estamos repitiendo demasiadas veces la variable venezuela, existe una herramienta que nos ayuda a combinar funciones encadenadas:
<center>pipe (o tubo)</center>
---
<div class="my-header"></div>
## Combinando funciones
<img src="img/diagrama_pipe.png" width="900">
---
<div class="my-header"></div>
## Combinando funciones con pipe
<center>
<img src="img/estoesunpipe.png" alt="Hex stickers" width="250">
</center>
Si reemplazamos los tubos por esto `%>%` obtenemos el resultado deseado
```{r}
decisiones %>%
filter(`Codigo Pais Origen` == "VEN") %>%
mutate(total = Reconocidas + `Proteccion Complementaria` + `Cerradas de otra forma` + Rechazadas) %>%
arrange(desc(total))
```
---
<div class="my-header"></div>
**Encuesta:** [https://PollEv.com/multiple_choice_polls/pb0m4Fh9kGkLRYKSmIsCq/respond](https://PollEv.com/multiple_choice_polls/pb0m4Fh9kGkLRYKSmIsCq/respond)
Cómo modificarias `decisiones` para que contenga las decisiones correspondientes a
pedidos de colombian@s ordenadas por cantidad de rechazos?
a)
```{r, eval=FALSE}
decisiones <- decisiones %>%
filter(decisiones, `Codigo Pais Origen` == "COL") %>%
arrange(decisiones, desc(Rechazadas))
```
b)
```{r, eval=FALSE}
decisiones <- decisiones %>%
filter(`Codigo Pais Origen` == "COL") %>%
arrange(desc(Rechazadas))
```
c)
```{r, eval=FALSE}
decisiones %>%
filter(`Codigo Pais Origen` == "COL") %>%
arrange(desc(Rechazadas))
```
---
<div class="my-header"></div>
## Resumir datos: `group_by` y `summarise`
Para agrupar los datos usamos `group_by()`
Supongamos que queremos saber cuantos asilos aceptados en total corresponden a cada país de origen
.pull-left[
```{r, eval = FALSE}
decisiones
```
.

]
.pull-right[
```{r, eval = FALSE}
decisiones %>%
group_by(`Codigo Pais Origen`)
```

]
Los datos no se modifican, solo se agrega un indicador de que esta agrupado
---
<div class="my-header"></div>
## Resumir datos: `group_by` y `summarise`
Una vez que están agrupados queremos elegir como resumirlo
```{r}
decisiones %>%
group_by(`Codigo Pais Origen`) %>%
summarise(total_aceptado = sum(Reconocidas))
```
---
<div class="my-header"></div>
## Resumir datos: `group_by` y `summarise`
Cual es el TOP 3 de país de origen aceptado?
--
```{r}
decisiones %>%
group_by(`Codigo Pais Origen`) %>%
summarise(total_aceptado = sum(Reconocidas)) %>%
arrange(desc(total_aceptado))
```
---
<div class="my-header"></div>
## `summarise` por separado
También `summarise` solo
```{r}
decisiones %>%
summarise(total_aceptados = sum(Reconocidas))
```
---
<div class="my-header"></div>
## Tu turno 4: verbos `dplyr`
1. Obtener una tabla con la cantidad de pedidos de asilo rechazados por Perú durante cada año
2. Ordena esta tabla de mayor a menor según la cantidad de pedidos aceptados
3. Que cantidad de pedidos fueron rechazados en total durante el período estudiado?
4. Crea una nueva variable
---
<div class="my-header"></div>
## Resumen de esta sección:
- Seleccionamos variables con `select()`
- Filtramos filas con `filter()`
- Ordenamos tablas con `arrange()`
- Hacemos tablas resumen con `summarise()`
- Creamos nuevas variables con `mutate()`
- Hacemos operaciones por grupos con `group_by()`
---
<div class="my-header"></div>
## Licencia y material usado
Licencia: [Creative Commons Attribution-ShareAlike 4.0 International License](https://creativecommons.org/licenses/by-sa/4.0/deed.es_ES).
Este material está inspirado y utiliza explicaciones de:
- [R para Clima](https://eliocamp.github.io/r-clima/) de Paola Corrales y Elio Campitelli
- [Master the Tidyverse](https://github.com/rstudio-education/master-the-tidyverse-instructors) de <NAME>
<file_sep>---
title: "Reportes Reproducibles en R"
author: "<NAME>"
date: "25/11/2019"
output:
ioslides_presentation:
logo: assets/img/lama_gtrm.png
css: test.css
transition: slower
widescreen: true
smaller: true
---
```{r setup, include=FALSE}
knitr::opts_chunk$set(echo = FALSE)
```
## ¿Qué podemos hacer con R? (I)
<br><br>
{width=103%}
**Fuente:** How to quickly produce statistical reports? The UNHCR "Cookbook" (2019).
<br>
## ¿Qué podemos hacer con R? (II)
{ width=98% }
## ¿Qué es RMarkdown?
### 1) Estructura unificada que permite combinar:
- Código de R
- Resultados
- Comentarios (texto)
### 2) Los documentos R Markdown:
- Son completamente reproducibles, se pueden regenerar automáticamente al actualizar tanto códigos R o datos empleados.
- Se pueden convertir en en diferentes formatos como: HTML, PDF, documentos Word, aplicaciones web interactivas, entre otros.
<br>
## Empecemos con RMarkdown!
### 1) File -> New File -> R Notebook
### 2) Guardar el archivo **.Rmd** en alguna carpeta o proyecto.
### 3) Seleccionar la opción **Knit** y se generará un archivo HTML.
<br>
## Título del documento
### Encabezado contiene algunos metadatos:
- title: "Mi primer reporte en R"
- author: "<NAME>"
- date: "25/11/2019"
- output: html_document
<br>
## Texto
### Encabezados
- `#`, encabezado de nivel 1
- `##`, encabezado de nivel 2
- `###`, encabezado de nivel 3
### Listas
- Listas con viñetas, usar `*` o `-` .
- Listas enumeradas, usar los números seguidos de un punto `1.`, `2.`, ...
### Cursiva y Negrita
- Para cursiva, el texto va entre asteriscos.
- Para negrita, el texto va entre dos asteriscos.
<br>
## Códigos R (*chunks*)
#### Botón Insert -> R

#### ¿Cómo controlamos un chunk?
- `eval = FALSE`, no se evalúa el código.
- `include = FALSE`, evalúa el código pero no muestra los resultados.
- `echo = FALSE`, no muestra el código, pero sí los resultados obtenidos.
- `message = FALSE`, evita mostrar mensajes en el documento.
- `warning = FALSE`, evita mostrar warnings en el documento.
#### Podemos combinar código en R con texto, usar comillas simples!
<br>
## Caso práctico
#### El flujo de personas con necesidades específicas ha aumentado drásticamente durante el mes de noviembre. Diariamente, **más de 2,000 personas esperan para poder realizar un ingreso regular a Perú**. Para responder a las necesidades de esta población, distintas organizaciones coordinan para brindar una **asistencia conjunta**. Para ello, están utilizando Kobo para hacer el registro de la asistencia entregada. Usted debe elaborar **varios reportes diarios** para informar sobre la respuesta a la emergencia.
<br/><br/>
#### Puede entrar al link de kobo [**aquí**.](https://enketo.unhcr.org/x/#4kXNmJjb)
<br>
## ¿Cómo conectar Kobo con R? (I)
Instalamos paquetes y funciones necesarias:
```{r echo = T, results = "hide", warning=FALSE, message=FALSE}
#Si es necesario, deberán instalar los paquetes
library(devtools)
library(httr)
library(jsonlite)
library(readr)
library(dplyr)
source_url("https://raw.githubusercontent.com/ppsapkota/kobohr_apitoolbox/master/R/r_func_ps_kobo_utils.R")
```
Credenciales de la cuenta:
```{r echo = T, results = "hide", warning=FALSE, message=FALSE}
kobo_user <- "ejemplor"
kobo_pw <- "<PASSWORD>!"
```
<br>
## ¿Cómo conectar Kobo con R? (II)
Identificamos el formulario que queremos cargar:
```{r echo = T, results = "hide", warning=FALSE, message=FALSE}
url <-"https://kobocat.unhcr.org/api/v1/data.csv"
d_formlist_csv <- kobohr_getforms_csv (url,kobo_user, kobo_pw)
d_formlist_csv <- as.data.frame(d_formlist_csv)
d_formlist_csv #ver
```
Cargamos el formulario!
```{r echo = T, results = "hide", warning=FALSE, message=FALSE}
url<- "https://kobocat.unhcr.org/api/v1/data/19694.csv"
data <- kobohr_getdata_csv(url,kobo_user,kobo_pw)
head(data) #resultado
```
<br>
## ¿Cómo podemos realizar consultas en R?
- Número de personas atendidas
```{r echo = T, results = "hide", warning=FALSE, message=FALSE}
nrow(data)
```
- Total de asistencia otorgada
```{r echo = T, results = "hide", warning=FALSE, message=FALSE}
sum(data$cantidad)
```
- Número de organizaciones humanitarias brindando asistencia:
```{r echo = T, results = "hide", warning=FALSE, message=FALSE}
length(unique(data$org))
```
<br>
## ¿Cómo podemos crear una tabla?
Primero debemos organizar la información que queremos mostrar!
```{r echo = T, results = "hide", warning=FALSE, message=FALSE}
#creamos la tabla
tabla1 <- data %>%
group_by(distribucion) %>%
summarise(Total = sum(cantidad)) %>%
mutate(Freq = Total / sum(Total))
#ordenamos la tabla de mayor a menor
tabla1 = tabla1[order(-tabla1$Total),]
tabla1
```
Usamos kable para visualizar nuestra información:
```{r echo = T, results = "hide", warning=FALSE, message=FALSE}
knitr::kable(tabla1)
```
<br>
## ¿Qué hemos aprendido?
<br>
### - Conectar Kobo con R.
### - Escribir un reporte con el paquete R Markdown :D.
### - Generar diferentes formatos de reportes reproducibles.
### - Entender la importancia de la **reproducibilidad** en el contexto humanitario!
<br>
## Recursos adicionales!
### - [**Sitio oficial de R Markdown**](https://rmarkdown.rstudio.com)
### - [**R Markdown: The Definitive Guide**](https://bookdown.org/yihui/rmarkdown/)
### - [**R Markdown Cheatsheet**](https://www.rstudio.com/resources/cheatsheets/)
### - [**R Markdown Reference Guide**](https://www.rstudio.com/wp-content/uploads/2015/03/rmarkdown-reference.pdf)
<br>
<br>
#
<br><br>
<br><br>
<br><br>
<br><br>
<center> <h2> Muchas gracias! </h2> </center>
<br><file_sep>---
title: "Proyectos y RMarkdown"
date: "`r Sys.Date()`"
output:
xaringan::moon_reader:
css: ["default", "default-fonts", "custom.css"]
mathjax: "https://cdn.bootcss.com/mathjax/2.7.1/MathJax.js?config=TeX-MML-AM_HTMLorMML"
lib_dir: libs
seal: false
nature:
ratio: 13:9
highlightStyle: github
highlightLines: true
countIncrementalSlides: false
---
```{r setup, include=FALSE}
options(htmltools.dir.version = FALSE)
knitr::opts_chunk$set(fig.height=5)
library(ggplot2)
library(readr)
library(here)
library(dplyr)
library(colorspace)
paleta <- rainbow_hcl(10)
print_it_rainbow <- function(palabra, paleta) {
i_font = 1
for (char in palabra) {
if (char != " ") {
cat(paste0("<font style='color:", paleta[i_font] , "'>",char,"</font>"))
i_font = i_font %% length(paleta) + 1
} else {
cat(" ")
}
}
}
```
background-color: #272822
<br>
<br>
<br>
<br>
<br>
<br>
```{r, results='asis', echo=FALSE}
cat("<h1 style='color:white'> <center> R para contextos humanitarios de emergencia")
cat("</center></h1> \n")
cat("## <center>")
palabra = unlist(strsplit("Proyectos y RMarkdown", split = ""))
cat(print_it_rainbow(palabra, paleta))
cat("</center> \n")
cat("### <center>")
fecha = as.character(Sys.Date())
palabra = unlist(strsplit("<NAME>", split = ""))
cat(print_it_rainbow(palabra, paleta))
cat("</center>")
```
---
background-color: #272822
<br>
<br>
<br>
<br>
<br>
<br>
```{r, results='asis', echo=FALSE}
cat("# <center>")
palabra = unlist(strsplit("Proyectos", split = ""))
cat(print_it_rainbow(palabra, paleta))
cat("</center>")
```
---
<div class="my-header"></div>
## Directorio de trabajo
R va a buscar en el "directorio de trabajo" por defecto cualquier archivo que le pidas leer (y va a guardarlo ahí también)
Puedes averiguar cual es:
```{r}
getwd()
```
Puedes cambiarlo también:
```{r, eval=FALSE}
setwd("C\\Users\\violeta\\mi_carpeta_favorita")
```
Pero existe una herramienta para no tener que lidiar con esto:
<center>`r emo::ji("star")`PROYECTOS`r emo::ji("star")`</center>
---
<div class="my-header"></div>
## Siempre vamos a trabajar en proyectos
Qué es?
- Carpeta que contiene TODOS los archivos relacionados a un proyecto
- .R (codigo en R)
- Imagenes (.png, .pdf, etc)
- Datos (.csv, .xls, etc)
Por qué es ventajoso trabajar en ellos?
- Es más ordenado
- Ayuda a trabajar con *paths* relativos y no absolutos
- Si quiero abrir un dato dentro de mi proyecto
> "mi_dato.csv"
en lugar de
> "C\\Users\\violeta\\mi_carpeta_favorita\\mi_dato.csv"
---
<div class="my-header"></div>
## Tu turno: Crea un nuevo proyecto en RStudio
1. Haz click en el menú “Archivo” (`File`) y luego en “Nuevo Proyecto” (`New Project`)
2. Haz click en “Nueva Carpeta” (`New Directory`)
3. Haz click en “Nuevo Proyecto” (`New Project`)
4. Escribe el nombre de la carpeta que alojará a tu proyecto, por ejemplo “mi_proyecto”
5. Haz click en “Crear Proyecto” (`Create Project`)
---
<div class="my-header"></div>
## Tu turno: Abre tu nuevo proyecto desde el explorador de archivos
1. Cierra RStudio
2. Desde el explorador de archivos, busca la carpeta donde creaste tu proyecto.
3. Haz doble click en el archivo que tiene el nombre de tu proyecto (y que termina con .Rproj) que encontrarás en esa carpeta.
---
<div class="my-header"></div>
## Estructura de un proyecto
- Carpeta que contiene TODOS los archivos relacionados a un proyecto
- Ordenado con sub-carpetas
- Subcarpetas que deberían estar si o si (hay que agregarlas)
- datos (o data)
- img
- En general hay una subcarpeta R para contener el código
- mi_proyecto/
- datos/
- dataset1.csv
- dataset2.xls
- img/
- foto1.jpg
- diagrama1.png
- R/
- limpiar_datos.R
---
<div class="my-header"></div>
## Manejo de un proyecto
Puedes crear nuevas carpetas para organizar tu proyecto en el explorador de carpetas o en la pestaña `Files` de la alacena de RStudio (abajo a la derecha)
Para guardar un archivo de código, haz como normalmente en Windows
> File->Save/Save as->Elegir normbre si corresponde + Guardar/Save
**Tu turno:**
1. Crea una carpeta llamada R dentro del proyecto creado
1. Crea un archivo con un vector con los numeros del 1 al 30
2. Guarda el vector en una variable llamada `mes`
3. Guarda el codigo con el nombre "mes.R"
---
background-color: #272822
<br>
<br>
<br>
<br>
<br>
<br>
```{r, results='asis', echo=FALSE}
cat("# <center>")
palabra = unlist(strsplit("RMarkdown", split = ""))
cat(print_it_rainbow(palabra, paleta))
cat("</center>")
```
---
<div class="my-header"></div>
## RMarkdown: combinar todo junto

<p style="color: gray; font-size:15px"> Ilustración adaptada de Allison Horst
</p>
---
<div class="my-header"></div>
## Una notebook RMarkdown

<p style="color: gray; font-size:15px"> Imagen adaptada del curso "Master the tidyverse" - <NAME> <br>
github.com/rstudio-education/master-the-tidyverse-instructors
</p>
---
<div class="my-header"></div>
## Tu turno: Crea una notebook RMarkdown
1. File > New File > RMarkdown...
2. Elegir un nombre del archivo
3. Elegir formato deseado
4. Guardar
Se genera una plantilla con un ejemplo, extensión del archivo es ".Rmd"
Para generar el output hay que apretar el botón Knit
<center><img src="img/knit.png" height="250"></center>
---
<div class="my-header"></div>
## Estructura de un .Rmd
Cualquier archivo de este tipo tiene 3 partes principales:
- El **encabezado** o **yaml** que determina generalidades como formato de salida, etc.
- El **texto** que puede estar a lo largo de todo el documento
- El **código en bloques** o **chunks**
<center><img src="img/elementosRM2.png" alt="Hex stickers" height="325"></center>
---
<div class="my-header"></div>
## Markdown
Cuando escribimos el texto de nuestro archivo, podemos agregar cierta sintaxis para que el texto generado no se vea solo como un bloc de notas
- **negrita** usando dos asteriscos así: `**negrita**`
- *cursiva* con un asterisco de cada lado: `*cursiva*`
Una lista de elementos se escribe utilizando guiones medios:
```
- la negrita se consigue con dos asteriscos
- la cursiva con un asterisco
- y para resaltar código se usa el acento grave `
```
---
<div class="my-header"></div>
## Markdown
También:
- Títulos con distinta jerarquía agregando `#` al comienzo. Esto además define secciones dentro del documento:
```
# Título
## El primer subtítulo
### Sub con menos jerarquía, etc.
```
- Link a una página externa: `[texto a mostrar](http://the-web-page.com)`
- Incluir una imagen: ``
---
<div class="my-header"></div>
## Posibles resultados finales de RMarkdown
- Word
- HTML
- PDF (se necesita instalar TinyTeX con `tinytex::install_tinytex()`; no lo pruebes ahora, tarda mucho tiempo)
<center>
<img src="img/outputRMhtml.png" alt="Hex stickers" width="300">
<img src="img/outputRMword.png" alt="Hex stickers" width="300">
</center>
<center><img src="img/outputpdf.png" alt="Hex stickers" width="300"></center>
---
background-color: #272822
<br>
<br>
<br>
<br>
<br>
<br>
```{r, results='asis', echo=FALSE}
cat("# <center>")
palabra = unlist(strsplit("Ahora es tu turno!", split = ""))
cat(print_it_rainbow(palabra, paleta))
cat("</center> \n")
cat("## <center>")
palabra = unlist(strsplit("Abre el archivo 01-EJ-RMarkdown.Rmd", split = ""))
cat(print_it_rainbow(palabra, paleta))
cat("</center> \n")
cat("### <center>")
fecha = as.character(Sys.Date())
palabra = unlist(strsplit("Sigue las instrucciones", split = ""))
cat(print_it_rainbow(palabra, paleta))
cat("</center>")
```
---
<div class="my-header"></div>
## Licencia y material usado
Licencia: [Creative Commons Attribution-ShareAlike 4.0 International License](https://creativecommons.org/licenses/by-sa/4.0/deed.es_ES).
Este material está inspirado y utiliza explicaciones de:
- [R para Clima](https://eliocamp.github.io/r-clima/) de Paola Corrales y Elio Campitelli
- [Master the Tidyverse](https://github.com/rstudio-education/master-the-tidyverse-instructors) de <NAME><file_sep>---
title: "Tratar y procesar datos 5W"
author: "Equipo IM GTRM"
date: "27/11/2020"
output:
ioslides_presentation:
logo: assets/img/lama_gtrm.png
css: test.css
transition: slower
widescreen: true
smaller: true
---
```{r setup, include=FALSE}
knitr::opts_chunk$set(echo = TRUE)
```
## PUNTOS
* **Contexto**
<br>
* **Flujo de datos**
<br>
* **Importar y validar los datos**
<br>
* **Cálculos de totales sectoriales/intersectoriales**
<br>
* **Próximos pasos**
## 1. Contexto
El GTRM Perú se encarga de la **preparación y monitoreo del Plan de Respuesta para Refugiados y Migrantes** (RMRP) de Venezuela.
<br>
En este contexto, el equipo de IM del GTRM organiza:
- Un **Análisis Conjunto de Necesidades**;
- La definición de **prioridades y objetivos operacionales**;
- La preparación de un **Marco de Monitoreo** para estos objetivos;
- El **monitoreo** de la respuesta bajo este marco.
<br>
Las rondas mensuales de 5W representan este último paso. Desde una perspectiva de coordinación, es esencial poder monitorear y visibilizar la respuesta en curso.
## 2. Flujo de datos
El flujo de datos para la 5W es el siguiente:
* Los **socios del GTRM** reportan sus actividades de forma mensual a través de una matriz en línea.
{ width=30% }
<br>
* El **equipo de manejo de información**:
+ **Descarga** la matriz;
+ **Limpia y valida** los datos;
+ **Agrega** la información para obtener totales sectoriales e intersectoriales;
+ **Comparte** la información a través de dashboards y otros productos.
## 3. Importar y validar los datos
Se importan los datos y se hace una primera ronda de limpieza:
```{r eval=FALSE}
library(readxl)
library(plyr)
library(tidyverse)
library(openxlsx)
LIMA <- read_excel(path=DATA_path, sheet="Lima", skip = 2)
LIMA$Departamento <- "Lima"
LIMA[,14:28] <- lapply(LIMA[,14:28], as.numeric)
LIMA$Organización <- trimws(LIMA$Organización)
...
DATA_ALL <- bind_rows(LIMA, TUMBES, TACNA, OTROS_DEP)
DATA_ALL <- DATA_ALL %>%
filter(!is.na(Organización))
rm(LIMA, TUMBES, TACNA, OTROS_DEP)
```
## 3. Importar y validar los datos (2)
R también puede identificar errores en los datos enviados:
```{r eval=FALSE}
# El total de rangos de sexo/edad corresponde al total reportado?
DATA_ALL_CHECK <- DATA_ALL
DATA_ALL_CHECK$CA_SUM <- rowSums(DATA_ALL_CHECK[,14:17])
DATA_ALL_CHECK$CA_SUM_CHECK[DATA_ALL_CHECK$CA_SUM != DATA_ALL_CHECK$`TOTAL CA`] <- "ERROR"
# Se reportó información hasta el nivel Admin2?
DATA_ALL_CHECK$PROVINCE_REPORTED[DATA_ALL_CHECK$Provincia == ""] <- "ERROR"
# La lógica de Admin1/Admin2 es correcta?
DATA_ALL_CHECK <- left_join(DATA_ALL_CHECK, ADMIN2, by="Provincia")
DATA_ALL_CHECK$PROVINCE_CORRECT[DATA_ALL_CHECK$Departamento != DATA_ALL_CHECK$ADM1_ES.y] <- "ERROR"
# Exportar el log de errores para hacer seguimiento
error_log <- list("ERRORS" = DATA_ALL_CHECK)
write.xlsx(error_log, file="ERROR_log.xlsx")
```
*Nota: Estos pasos se pueden evitar utilizando una plataforma con reglas de validación predefinidas.*
El script exporta un archivo Excel con todos los errores identificados. A través de este registro, es posible hacer un seguimiento sistemático con cada organización.
## 4. Calcular total de beneficiarios
Desde el marco de monitoreo, se puede hacer una distinción entre los beneficiarios reportados:
* **Totales que se pueden calcular** para el total sectorial:
+ Los beneficiarios se pueden sumar entre socios (no hay duplicación);
+ Los beneficiarios no se pueden sumar entre socios (riesgo de duplicación): Se toma el MAX;
* **Totales que no se pueden calcular** para el total sectorial (por ejemplo, beneficiarios indirectos):
+ No se consideran las cifras de beneficiarios para el total.
```{r eval=FALSE}
DepartmentActTotal = function(df) {
df1 = filter(df, `Rule_BNF` == "SUM") #### Si se cuentan y se puede sumar
df2 = ddply(df1[c(4, 7, 10, 15:29)], .(Departamento, Sector, Actividad), numcolwise(sum, na.rm=T))
df3 <- filter(df, `Rule_BNF` == "MAX") #### Si se cuentan y NO se puede sumar
df4 = ddply(df3[c(4, 7, 10, 15:29)], .(Departamento, Sector, Actividad), numcolwise(max, na.rm=T))
df5 <- filter(df, `Rule_BNF` == "N/A")#### Si NO se cuentan
df5[c(15:29)] <- 0
df6 = ddply(df5[c(4, 7, 10, 15:29)], .(Departamento, Sector, Actividad), numcolwise(sum, na.rm=T))
df7 = rbind(df2, df4, df6) #### Juntando todos
return(df7)
}
DATA_ACT_Departamentos <- DepartmentActTotal(DATA_ALL)
```
## 4. Calcular total de beneficiarios (2)
El total de beneficiarios a nivel intersectorial se produce desde:
* Cálculo de totales sectoriales en un área geográfica;
* Selección del valor máximo entre los sectores para esta área **= total intersectorial local**
* Suma de los valores máximos entre todas las áreas geográficas **= total intersectorial nacional**
```{r eval=FALSE}
IntersectorTotal = function(df) {
df1 = ddply(df[c(1, 6:20)], .(Departamento), numcolwise(max, na.rm=T))
  df2 = mutate(df1, `Pais` = "Nacional")
  df3 = ddply(df2, .(Pais), numcolwise(sum, na.rm=T))
return(df3)
}
TOTAL_Intersector <- IntersectorTotal(Total_Sector_Departamentos) # Total intersectorial nacional
```
## 4. Calcular total de beneficiarios (3)
<br>
El total de beneficiarios se prepara para las siguientes categorías:
* Total geográfico (Admin1, Admin2, Admin3);
* Total sectorial;
* Total para actividades bajo el RMRP;
* Total para actividades relacionadas al COVID
Y otras categorías, así como combinaciones entre ellas.
<br>
En total, se exportan casi 30 diferentes matrices desde los datos de la 5W, sin incluir los formatos regionales y otros cálculos para totales acumulados.
## 5. Los próximos pasos
<br>
El trabajo con R al momento se enfoca únicamente sobre la **transformación y agregación de datos**.
En el futuro, el equipo quiere incorporar R para los siguientes pasos:
* **Descarga** de datos desde plataforma en línea
* **Visualización** de información a través de reportes y otros productos
* **Upload de datos** en plataformas en línea
* **Compartir** el código en línea
El objetivo es asegurar un control de **todo el flujo de datos** de monitoreo de forma sistemática, automatizada y transparente con todos los socios del GTRM.
<file_sep>---
title: "Visualización"
date: "`r Sys.Date()`"
output:
xaringan::moon_reader:
css: ["default", "default-fonts", "custom.css"]
mathjax: "https://cdn.bootcss.com/mathjax/2.7.1/MathJax.js?config=TeX-MML-AM_HTMLorMML"
lib_dir: libs
seal: false
nature:
ratio: 13:9
highlightStyle: github
highlightLines: true
countIncrementalSlides: false
---
```{r setup, include=FALSE}
options(htmltools.dir.version = FALSE)
knitr::opts_chunk$set(fig.height=5)
library(ggplot2)
library(readr)
library(here)
library(dplyr)
library(colorspace)
paleta <- rainbow_hcl(10)
print_it_rainbow <- function(palabra, paleta) {
i_font = 1
for (char in palabra) {
if (char != " ") {
cat(paste0("<font style='color:", paleta[i_font] , "'>",char,"</font>"))
i_font = i_font %% length(paleta) + 1
} else {
cat(" ")
}
}
}
```
background-color: #272822
<br>
<br>
<br>
<br>
<br>
<br>
```{r, results='asis', echo=FALSE}
cat("<h1 style='color:white'> <center> R para contextos humanitarios de emergencia")
cat("</center></h1> \n")
cat("## <center>")
palabra = unlist(strsplit("Visualización de datos I", split = ""))
cat(print_it_rainbow(palabra, paleta))
cat("</center> \n")
cat("### <center>")
fecha = as.character(Sys.Date())
palabra = unlist(strsplit("<NAME>", split = ""))
cat(print_it_rainbow(palabra, paleta))
cat("</center>")
```
---
background-color: #272822
<br>
<br>
<br>
<br>
<br>
<br>
```{r, results='asis', echo=FALSE}
cat("<h1 style='color:white'> <center> Abrir el archivo 04-EJ-visualizacion.Rmd")
cat("</center></h1> \n")
cat("## <center>")
palabra = unlist(strsplit("Para ir haciendo los EJ", split = ""))
cat(print_it_rainbow(palabra, paleta))
cat("</center> \n")
```
---
<div class="my-header"></div>
## Visualización de datos
<center><img src="img/ggplot2_obra_maestra.png" width="750"></center>
---
<div class="my-header"></div>
## Ejemplos:
```{r, echo=FALSE, fig.height=6, fig.width=9, fig.retina=3}
plot(1:20, 2 + 1.5 * 1:20 + rnorm(20))
```
---
<div class="my-header"></div>
## Ejemplos:
```{r, echo = FALSE, fig.height=6, fig.width=9, fig.retina=6}
ggplot(data = NULL, aes(1:20, 2 + 1.5 * 1:20 + rnorm(20))) +
geom_point() +
labs(x = "x", y = "y", title = "Mi gráfico", subtitle = "Es muy lindo") +
theme(plot.title.position = "plot", title = element_text(size = 20))
```
---
<div class="my-header"></div>
## Ejemplos:
<center><img src="img/mapa1_ejemplo.png" height="530"></center>
---
<div class="my-header"></div>
## Ejemplos:
<center><img src="img/ggplot2-carbonprint.png" height="530"></center>
---
<div class="my-header"></div>
## Los datos: campamentos en Haití
Ahora vamos a trabajar un poco con el dataset de los campamentos de Haití
```{r include=FALSE}
library(readxl)
url_haiti <- "https://data.humdata.org/dataset/27baf423-2d13-49a4-bd72-cb65d8f924da/resource/9f71ab56-8a9a-4df2-be5d-f26d3600a285/download/dtm-haiti-site-assessment-round-33.xlsx"
destfile <- "dtm_haiti_site_assessment_round_33.xlsx"
curl::curl_download(url_haiti, destfile)
haiti <- read_excel(destfile,
col_types = c("text", "text", "text",
"numeric", "numeric", "numeric",
"numeric", "numeric", "numeric",
"text", "text", "text", "text", "text",
"text", "text", "text", "text"))
haiti <- slice(haiti, -1)
```
```{r}
str(haiti)
```
---
<div class="my-header"></div>
## Los datos: campamentos en Haití
Variables:
- Comuna
- Nombre del campamento
- Latitud
- Longitud
- Cantidad de Viviendas
- Cantidad de Individuos
- Provisión de Agua
- Presencia de Sanitarios
- etc.
Qué relación pensamos que debería haber entre Cantidad de Viviendas y Cantidad de Individuos?
---
<div class="my-header"></div>
## Tu turno 1: Mi primer gráfico
Corre este código en la notebook para generar el gráfico que queremos
```{r, eval = FALSE}
ggplot(data = haiti) +
geom_point(mapping = aes(x = Households, y = Individuals))
```
---
<div class="my-header"></div>
## Código `ggplot2`

**Plantilla para cualquier gráfico:**

---
<div class="my-header"></div>
## Algunas propiedades estéticas posibles
- Ejes (x, y,...)
- color (color de borde o de cosas sin relleno como punto o línea)
- fill (color de relleno)
- shape (por ejemplo para dispersión, círculo, triángulo, equis)
- size (tamaño)
- alpha (transparencia)
## Según el tipo de variable y de gráfico
- numérica continua
- categórica
- texto
---
<div class="my-header"></div>
## Color
Supongamos que queremos distinguir además cuáles de estos campamentos tienen Provisión de Agua
.pull-left[
<center><b>Espacio Visual</b></center>
<center>Color</center>
<br>
<center>
<p style="color:#F8766D">
rojo
</p>
</center>
<center>
<p style="color:#00BFC4">
azul</p>
</center>
]
.pull-right[
<center><b>Espacio de los datos</b></center>
<center>Provision de Agua</center>
<br>
<center>
<p>No</p>
</center>
<center>
<p>Yes</p>
</center>
]
---
<div class="my-header"></div>
## Mapear provisión de agua
```{r,}
ggplot(data = haiti) +
geom_point(mapping = aes(x = Households,
y = Individuals,
color = `Water Provision`))
```
La leyenda se agrega automáticamente
---
<div class="my-header"></div>
## Mapear provisión de agua
```{r,}
ggplot(data = haiti) +
geom_point(mapping = aes(x = Households,
y = Individuals,
shape = `Water Provision`))
```
La leyenda se agrega automáticamente
---
<div class="my-header"></div>
## Mapear provisión de agua
```{r,}
ggplot(data = haiti) +
geom_point(mapping = aes(x = Households,
y = Individuals,
size = `Water Provision`))
```
La leyenda se agrega automáticamente
---
<div class="my-header"></div>
## Cómo hacer este gráfico?
```{r, echo = FALSE}
ggplot(data = haiti) +
geom_point(mapping = aes(x = Households,
y = Individuals),
color = "blue")
```
---
<div class="my-header"></div>
**Encuesta:** [https://PollEv.com/multiple_choice_polls/jr8bO8kgHIN3gXi4WBsSO/respond](https://PollEv.com/multiple_choice_polls/jr8bO8kgHIN3gXi4WBsSO/respond)
a)
```{r, eval = FALSE}
ggplot(data = haiti) +
geom_point(mapping = aes(x = Households, y = Individuals),
color = "blue")
```
b)
```{r, eval = FALSE}
ggplot(data = haiti) +
geom_point(mapping = aes(x = Households, y = Individuals),
color = "red")
```
c)
```{r, eval = FALSE}
ggplot(data = haiti) +
  geom_point(mapping = aes(x = Households, y = Individuals, color = "blue"))
```
d)
```{r, eval = FALSE}
ggplot(data = haiti) +
geom_point(mapping = aes(x = Households, y = Individuals, color = "red"))
```
---
<div class="my-header"></div>
## Cómo hacer este gráfico?
```{r, }
ggplot(data = haiti) +
geom_point(mapping = aes(x = Households,
y = Individuals,
color = "blue"))
```
---
<div class="my-header"></div>
## Cómo hacer este gráfico?
Si quiero indicar una característica estética que NO está vinculada a una variable, entonces la indico AFUERA del aes()
```{r, }
ggplot(data = haiti) +
geom_point(mapping = aes(x = Households,
y = Individuals),
color = "blue")
```
---
<div class="my-header"></div>
## Qué tienen de distinto estos dos gráficos?
.pull-left[
```{r, echo=FALSE}
ggplot(data = haiti) +
geom_point(mapping = aes(x = Households,
y = Individuals))
```
]
.pull-right[
```{r,echo=FALSE, message=FALSE }
ggplot(data = haiti) +
geom_smooth(mapping = aes(x = Households,
y = Individuals))
```
]
---
<div class="my-header"></div>
## Qué tienen de distinto estos dos gráficos?
.pull-left[
```{r, echo=FALSE}
ggplot(data = haiti) +
geom_point(mapping = aes(x = Households,
y = Individuals))
```
]
.pull-right[
```{r,echo=FALSE, message=FALSE }
ggplot(data = haiti) +
geom_smooth(mapping = aes(x = Households,
y = Individuals))
```
]
Mismos datos, mismo mapeo de aesthetics pero distinto geom
`geom_point` y `geom_smooth`
---
<div class="my-header"></div>
# Tu turno 2
Reproducir este histograma de la variable Individuals con la ayuda de la guía rápida de `ggplot2`. Usa `geom_histogram`
Ayuda: no utilizar la variable `y`
```{r,echo=FALSE}
ggplot(data = haiti) +
geom_histogram(mapping = aes(Individuals))
```
---
<div class="my-header"></div>
# Tu turno 3
Reproducir este gráfico de barras que grafica la cantidad de campamentos en cada comuna. Utiliza la ayuda de la guía rápida de `ggplot2`. Usa `geom_bar`
Ayuda: no utilizar la variable `y`
```{r,echo=FALSE}
ggplot(data = haiti) +
geom_bar(mapping = aes(x = Commune, fill = Commune))
```
---
<div class="my-header"></div>
## Gráficos de barras
```{r}
ggplot(data = haiti) +
geom_bar(mapping = aes(x = Commune, color = Commune))
```
---
<div class="my-header"></div>
## Gráficos de barras
```{r}
ggplot(data = haiti) +
geom_bar(mapping = aes(x = Commune, fill = Commune))
```
---
<div class="my-header"></div>
# Gráficos de barras
```{r}
ggplot(data = haiti) +
geom_bar(mapping = aes(x = Commune, fill = Commune)) +
coord_flip()
```
---
<div class="my-header"></div>
# Tu turno 4
Predice la salida del siguiente código
Luego, córrelo
```{r, eval = FALSE}
ggplot(data = haiti) +
geom_point(mapping = aes(x = Households, y = Individuals)) +
geom_smooth(mapping = aes(x = Households, y = Individuals))
```
---
<div class="my-header"></div>
## Varias capas
Cada nuevo `geom_...` agrega una capa
```{r}
ggplot(data = haiti) +
geom_point(mapping = aes(x = Households, y = Individuals)) +
geom_smooth(mapping = aes(x = Households, y = Individuals))
```
---
<div class="my-header"></div>
## Mapeos globales vs mapeos locales
Podemos mapear características estéticas para todos los geoms así
```{r}
ggplot(data = haiti, mapping = aes(x = Households, y = Individuals)) +
geom_point() +
geom_smooth()
```
---
<div class="my-header"></div>
## Mapeos globales vs mapeos locales
Podemos agregar mapeos locales así
```{r}
ggplot(data = haiti, mapping = aes(x = Households, y = Individuals)) +
geom_point(mapping = aes(color = `Water Provision`)) +
geom_smooth()
```
---
<div class="my-header"></div>
## Mapeos globales vs mapeos locales
Qué sucede aquí?
```{r, message = FALSE}
ggplot(data = haiti, mapping = aes(x = Households, y = Individuals,
color = `Water Provision`)) +
geom_point() +
geom_smooth()
```
---
<div class="my-header"></div>
## Mapeos globales vs mapeos locales
Incluso, podemos hacer mapeos locales de datos:
```{r}
ggplot(data = haiti, mapping = aes(x = Households,
y = Individuals)) +
geom_point(aes(color = `Water Provision`)) +
geom_smooth(data = filter(haiti, Households < 500))
```
---
<div class="my-header"></div>
## Salvar gráficos
La función `ggsave` salva el último gráfico generado
Debemos especificar el nombre del archivo
En general indicaremos un path relativo
```{r, eval=FALSE}
ggsave("data/my-plot.pdf")
ggsave("data/my-plot.png")
```
El tamaño default en general no es muy bueno. Podemos indicar las dimensiones
```{r, eval=FALSE}
ggsave("data/my-plot.pdf", width = 6, height = 6)
```
Tu turno 5: Salva tu último gráfico en la carpeta img de tu proyecto
Si por algún motivo no estás en un proyecto, vuelve al proyecto del curso
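Una posible solución para el Tu turno 5 (el nombre del archivo es de ejemplo):
```{r, eval = FALSE}
# guarda el ultimo grafico generado en la carpeta img del proyecto
ggsave("img/mi_ultimo_grafico.png", width = 6, height = 4)
```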
---
<div class="my-header"></div>
## Más gráficos
Volvamos a la tabla de decisiones de pedidos de asilo en Perú
```{r, include=FALSE, message = FALSE}
decisiones <- read_csv("data/decisiones_asilo_peru.csv") %>%
mutate(total = Reconocidas + `Proteccion Complementaria` + `Cerradas de otra forma` + Rechazadas)
```
```{r}
decisiones
```
---
<div class="my-header"></div>
## Series de tiempo
Podemos graficar una variable a lo largo del tiempo para detectar tendencias, eventos y sus consecuencias
<center><img src="img/series_tiempo.png" height="460"></center>
---
## Series de tiempo
Podemos graficar una variable a lo largo del tiempo para detectar tendencias, eventos y sus consecuencias
<center><img src="img/grafico_autoras_mujeres.jfif" height="460"></center>
---
<div class="my-header"></div>
## Series de tiempo
Podemos graficar una variable a lo largo del tiempo con una línea
Utilizamos `geom_line`
```{r}
decisiones %>%
filter(`Codigo Pais Origen` == "VEN") %>%
ggplot() +
geom_line(aes(Anio, total))
```
---
<div class="my-header"></div>
## Para practicar luego:
Modifica el gráfico anterior para que la línea sea roja y cambia su grosor
```{r, echo = FALSE}
decisiones %>%
filter(`Codigo Pais Origen` == "VEN") %>%
ggplot() +
geom_line(aes(Anio, total), color = "red", size = 4)
```
---
<div class="my-header"></div>
## Gráficos por paneles
A veces es más claro si tenemos varios paneles, uno para cada valor de una variable categórica/discreta
```{r, echo = FALSE}
decisiones %>%
filter(`Codigo Pais Origen` %in% c("VEN", "COL", "CUB", "HTI")) %>%
ggplot() +
geom_line(aes(Anio, total,
color = `Codigo Pais Origen`)) +
facet_wrap(~ `Codigo Pais Origen`)
```
---
<div class="my-header"></div>
## Gráficos por paneles
Para eso agregamos una capa llamada `facet_*`
- `facet_wrap`
- `facet_grid`
Por default las escalas son las mismas
En caso de que tenga sentido cambiarlas podemos agregar `scales="free"` como opción
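Por ejemplo, con los datos de Haití podríamos hacer un panel por comuna con escalas independientes así:
```{r, eval = FALSE}
ggplot(data = haiti) +
  geom_point(aes(x = Households, y = Individuals)) +
  facet_wrap(~ Commune, scales = "free")
```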
---
<div class="my-header"></div>
## Gráficos por paneles
Podemos obtener el gráfico anterior de la siguiente forma:
```{r, eval = FALSE}
decisiones %>%
  filter(`Codigo Pais Origen` %in% c("VEN", "COL", "CUB", "HTI")) %>%
ggplot() +
geom_line(aes(Anio, total, color = `Codigo Pais Origen`)) +
  facet_wrap(~ `Codigo Pais Origen`)
```
Puedes indicar el número de columnas y de filas con los argumentos `nrow` y `ncol`
Incluso, podemos generar paneles basados en dos variables categóricas
```{r, eval = FALSE}
ggplot() +
geom_*(aes(...)) +
facet_grid(var1 ~ var2)
```
---
<div class="my-header"></div>
## Para practicar después:
1. Con la ayuda de la guía rápida intenta reproducir este gráfico que indica con una línea punteada el año del importante terremoto en Haití
2. Quita la línea y modifícalo para que se vea en paneles
```{r, echo = FALSE}
decisiones %>%
filter(`Codigo Pais Origen` %in% c("VEN", "COL", "HTI")) %>%
ggplot() +
geom_area(aes(Anio, total,
fill = `Codigo Pais Origen`)) +
geom_vline(xintercept = 2010,linetype = 2)
```
---
<div class="my-header"></div>
## Combinando gráficos: `patchwork`
El paquete patchwork es muy útil para combinar gráficos
Al igual que los datos y los vectores, los gráficos también pueden guardarse en una variable
```{r}
g1 <- decisiones %>%
filter(`Codigo Pais Origen` == "HTI") %>%
ggplot() +
geom_area(aes(Anio, total), fill = "red") +
geom_vline(xintercept = 2010,linetype = 2)
```
```{r}
g2 <- ggplot(data = haiti) +
geom_bar(mapping = aes(x = Commune, fill = Commune)) +
coord_flip() +
theme(legend.position = "none")
```
```{r}
g3 <- ggplot(data = haiti) +
geom_point(mapping = aes(x = Households,
y = Individuals,
color = `Water Provision`))
```
---
<div class="my-header"></div>
## Combinando gráficos: `patchwork`
```{r}
#install.packages("patchwork")
library(patchwork)
g1 + g2
```
---
<div class="my-header"></div>
## Combinando gráficos: `patchwork`
```{r}
#install.packages("patchwork")
library(patchwork)
(g1 | g3) /
g2
```
---
<div class="my-header"></div>
## DESAFIO 2 para practicar después
1. Importa el dataset `dtm3-peru` (cuidado con la primera fila, sáltala)
[https://data.humdata.org/dataset/dad01c41-adde-46ee-998d-a6baf9eeb8d1/resource/538975d3-f7bb-4f0d-9111-58f7e00ee4f1/download/dtm3_version_publica_1209-1.xlsx](https://data.humdata.org/dataset/dad01c41-adde-46ee-998d-a6baf9eeb8d1/resource/538975d3-f7bb-4f0d-9111-58f7e00ee4f1/download/dtm3_version_publica_1209-1.xlsx)
2. Cuántas personas respondieron la encuesta?
3. Haz un gráfico que represente la distribución del "Número de Familias que viven en la misma carpa o techo"
4. Representa con la ayuda de la guía rápida un gráfico de barras lado a lado del "Número de Familias que viven en la misma carpa o techo" que discrimine según distrito (más abajo hay un esquema para los primeros pasos)
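Un posible esquema para los primeros pasos, siguiendo el patrón de descarga usado antes (ajusta `skip` y revisa los nombres reales de las columnas del archivo):
```{r, eval = FALSE}
library(readxl)

url_dtm3 <- "https://data.humdata.org/dataset/dad01c41-adde-46ee-998d-a6baf9eeb8d1/resource/538975d3-f7bb-4f0d-9111-58f7e00ee4f1/download/dtm3_version_publica_1209-1.xlsx"
destino <- "dtm3_peru.xlsx"
curl::curl_download(url_dtm3, destino)

# la primera fila no es parte de los datos, por eso el skip (ajustar si hace falta)
dtm3 <- read_excel(destino, skip = 1)
nrow(dtm3)  # cantidad de filas; verifica si cada fila corresponde a una persona encuestada
```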
---
<div class="my-header"></div>
## Resumen
- elegir un dataset
**data**
- elegir un gráfico adecuado
**geom_**
- mapear las variables con las propiedades estéticas
**aes()**
---
<div class="my-header"></div>
## Para seguir
Puedo hacer muuuchas cosas con `ggplot2`
- [R para Ciencia de Datos](https://es.r4ds.hadley.nz/)
- [ggplot2: elegant graphics for data analysis](https://ggplot2-book.org/)
- [Data Visualization - A practical introduction](https://socviz.co/)
Un poco más sobre cómo arreglar gráficos para publicar:
- Títulos y otros textos
- Etiquetas
- Temas
- Colores
---
<div class="my-header"></div>
## Licencia y material usado
Licencia: [Creative Commons Attribution-ShareAlike 4.0 International License](https://creativecommons.org/licenses/by-sa/4.0/deed.es_ES).
Este material está inspirado y utiliza explicaciones de:
- [R para Clima](https://eliocamp.github.io/r-clima/) de <NAME> y <NAME>
- [Master the Tidyverse](https://github.com/rstudio-education/master-the-tidyverse-instructors) de <NAME>
<file_sep>---
title: "Nuestras propias funciones"
date: "`r Sys.Date()`"
output:
xaringan::moon_reader:
css: ["default", "default-fonts", "custom.css"]
mathjax: "https://cdn.bootcss.com/mathjax/2.7.1/MathJax.js?config=TeX-MML-AM_HTMLorMML"
lib_dir: libs
seal: false
nature:
ratio: 13:9
highlightStyle: github
highlightLines: true
countIncrementalSlides: false
---
```{r setup, include=FALSE}
options(htmltools.dir.version = FALSE)
knitr::opts_chunk$set(fig.height=5)
library(ggplot2)
library(readr)
library(here)
library(dplyr)
library(colorspace)
paleta <- rainbow_hcl(10)
print_it_rainbow <- function(palabra, paleta) {
i_font = 1
for (char in palabra) {
if (char != " ") {
cat(paste0("<font style='color:", paleta[i_font] , "'>",char,"</font>"))
i_font = i_font %% length(paleta) + 1
} else {
cat(" ")
}
}
}
```
background-color: #272822
<br>
<br>
<br>
<br>
<br>
<br>
```{r, results='asis', echo=FALSE}
cat("<h1 style='color:white'> <center> R para contextos humanitarios de emergencia")
cat("</center></h1> \n")
cat("## <center>")
palabra = unlist(strsplit("Nuestras propias funciones", split = ""))
cat(print_it_rainbow(palabra, paleta))
cat("</center> \n")
cat("### <center>")
fecha = as.character(Sys.Date())
palabra = unlist(strsplit("<NAME>", split = ""))
cat(print_it_rainbow(palabra, paleta))
cat("</center>")
```
---
<div class="my-header"></div>
## Mini introducción a funciones
- Si te encuentras una y otra vez copiando la misma línea de código para realizar una operación, entonces es hora de escribir una función que haga esa tarea por ti.
- Hasta ahora estuvimos utilizando funciones que vienen con R (lo que llamamos R base) y funciones de paquetes que están disponibles para extender las funcionalidades de R.
- función: conjunto de comandos o líneas de código definida para usarla en el futuro, con un nombre (uno que represente la acción u operación para la cual la estamos creando), elementos de entrada y el resultado final.
---
<div class="my-header"></div>
## Definición de funciones
Supongamos que una médica desea efectuar una evaluación del índice de masa corporal en los adultos de un campamento. En base a esto se planearán acciones.
$IMC = peso (kg) / altura^2 (metros)$
```{r}
IMC <- function(peso, altura) {
valor_IMC <- peso / altura^2
return(valor_IMC)
}
```
Al correr estas líneas de código se generará un nuevo elemento en nuestro entorno (mesada). La función está lista para ser usada. Pero antes de probarla revisemos la receta para definir una función.
**function** indica que es una función y las **llaves delimitan** el cuerpo de la función. La función puede recibir uno o más elementos de entrada: los argumentos. En este caso recibe 2.
---
<div class="my-header"></div>
## Usando la función
Ahora probemos calcular mi índice de masa corporal utilizando la función.
```{r}
IMC(60, 1.65)
```
Los elementos de entrada en este caso son 2 números (60 y 1.65) y nos devolvió un número. Probemos con dos vectores
```{r}
peso_adultos <- c(50, 60, 45, 70, 100)
altura_adultos <- c(1.65, 1.66, 1.6, 1.61, 1.7)
IMC(peso_adultos, altura_adultos)
```
- Es importante respetar el orden!
```{r}
IMC(altura_adultos, peso_adultos)
```
- Los vectores deben tener la misma longitud!
---
<div class="my-header"></div>
## Reflexiones sobre funciones
- Definir funciones nos abre todo un mundo de posibilidades `r emo::ji("earth")`
- Cuando tengamos varias funciones definidas para trabajar en un mismo proyecto podemos hacer nuestro propio paquete! `r emo::ji("gift")`
- Y si pensamos que le puede servir a otras personas podemos publicarlo para que sea accesible! Incluso ell@s pueden ayudarme a arreglar y extender mi paquete incorporando nuevas funcionalidades.
---
<div class="my-header"></div>
## Licencia y material usado
Licencia: [Creative Commons Attribution-ShareAlike 4.0 International License](https://creativecommons.org/licenses/by-sa/4.0/deed.es_ES).
Este material está inspirado y utiliza explicaciones de:
- [R para Clima](https://eliocamp.github.io/r-clima/) de <NAME> y <NAME>
- [Master the Tidyverse](https://github.com/rstudio-education/master-the-tidyverse-instructors) de <NAME>
<file_sep>library(readxl)
DB <- read_excel("C:/Users/OIM/Desktop/Taller R - IMWG/DB.xlsx")
head(DB)
attach(DB)
str(DB)
summary(Edad)
hist(Edad)
plot(Edad, Cantidad_dias)
plot(Edad, Cantidad_dias, pch=20)
###exporting graphics
jpeg("test.jpeg") #opens a device
plot(Edad, Cantidad_dias)
dev.off() #closes the graphic device/ must do this
library(ggplot2)
# aesthetics
gr0 <- ggplot(DB, aes(x = Sexo))
gr0
# barplot
gr1 <- ggplot(DB, aes(x = Sexo)) + geom_bar()
gr1
# reduciendo el ancho de las barras
gr1 <- ggplot(DB, aes(x = Sexo)) + geom_bar(width=0.5)
gr1
# coloreando
gr1 <- ggplot(DB, aes(x = Sexo)) + geom_bar(width=0.5, colour="black", fill="blue") ##colour = border #fill=interior
gr1
# añadiendo etiquetas
gr1 <- ggplot(DB, aes(x = Sexo)) +
geom_bar(width=0.5, colour="black", fill="blue") +
geom_text(aes(label=..count..),stat="count",vjust = -.25)
### cambiar vjust por position = position_stack(0.5)
## limites a los gráficos
gr1 + scale_y_continuous(limits = c(0, 300))
#añadiendo capas para nombre de etiquetas
gr1 + xlab("Genero") + ylab ("Número de casos")# etiquetas de los ejes
#añadiendo titulos
gr1 + xlab("Sexo") + ylab ("Número de casos") +
ggtitle("Sexo del entrevistado")
#temas de fondo
gr1 + xlab("Sexo") + ylab ("Número de casos") +
ggtitle("Sexo del entrevistado") + theme_light() #theme_classic #theme_bw
#points
points<- ggplot(DB, aes(x = Edad, y = Cantidad_dias)) +
geom_point(aes(colour = Sexo))
points
#time-series
time<- ggplot(DB, aes(x = Fecha_llegada, y = Dinero_disponible)) +
geom_line() +
labs(title = "Dinero disponible por fecha de llegada",
x = "Fecha de llegada",
y = "Dinero disponible")
time
##con porcentajes
##Ejercicio
library(scales)
gr2 <- ggplot(DB, aes(x = Sexo)) +
geom_bar(aes(y = (..count..)/sum(..count..))) +
scale_y_continuous(labels=scales::percent, limits = c(0,1))
##Otros ejercicios
#histograma
hist <- ggplot(DB, aes(Edad)) + geom_histogram(binwidth = 10,fill="red", colour="black")
#adicional
hist + facet_grid(Sexo ~.)
#curva de densidad
dens<-ggplot(DB, aes(Edad)) + geom_line(stat="density") + facet_grid(Sexo ~.)
dens
#juntando ambas curvas
#geom_line
dens2<-ggplot(DB, aes(Edad, colour=Sexo)) +
geom_line(stat="density") +
xlim(18, 100)
dens2
#geom_density
dens3<-ggplot(DB, aes(Edad, colour=Sexo)) +
geom_density()
dens3
#paquetes de ayuda
#1. esquisse
install.packages("esquisse")
library(esquisse)
esquisse::esquisser()
<file_sep>---
title: "Introducción al tidyverse"
author: "<NAME>"
date: "25/11/2019"
output:
ioslides_presentation:
logo: assets/img/lama_gtrm.png
css: test.css
transition: slower
widescreen: true
smaller: true
---
```{r setup, include=FALSE}
knitr::opts_chunk$set(echo = TRUE, eval = FALSE)
```
# Transformación de datos en R
Introducción al tidyverse!
## Qué es el tidyverse?
El "tidyverse" es un grupo de paquetes utilizado (sobre todo) para la manipulación de datos en R.

<br>
<br>
Esta sesión se concentrará solo sobre algunas funcionalidades del tidyverse, en particular:
- **tidyr**
- **dplyr**
## Qué vamos a aprender
- **Importar** datos
- **Sumar y agregar** datos
- **Filtrar** valores
- **Seleccionar** variables
- **Cambiar formato** de datos (ancho-largo)
- **Crear** nuevas variables
- **Combinar** tablas
## Empezamos!
- Creamos un nuevo archivo script: File > New File > R Script
- Guardarlo como "20191125_Ejercicios R"

<br>
- Esto será el script para nuestra sesión. Guardarlo periódicamente!
## Instalar tidyverse
Antes de empezar, necesitamos instalar tidyverse (y otro paquete para importar archivos Excel). Podemos ingresar este código en la consola:
```{r}
install.packages("tidyverse")
install.packages("readxl")
```
En nuestro script vamos a poner el código siguiente:
```{r}
library("tidyverse")
library("readxl")
```
## Importar datos en R
**Para nuestro ejercicio vamos a utilizar los datos del RMRP 2020 Perú. **
Los datos son en un archivo Excel con dos pestañas:
- **Actividades planeadas** por los socios a nivel de departamento y de sector;
- **Proyecciones** de población total y cálculo de **personas con necesidades**;
Para importar los datos, vamos a utilizar la función "read_excel" y guardarlos en dos tablas:
```{r}
Actividades <- read_excel(path="Datos/20191125_RMRP_Datos.xlsx", sheet="Actividades")
Proyecciones <- read_excel(path="Datos/20191125_RMRP_Datos.xlsx", sheet="Población")
```
## Visualizar los datos
Podemos utilizar la función "dim" para tener una idea de las dimensiones de nuestros datos:
```{r}
dim(Actividades)
dim(Proyecciones)
```
Utilizamos función "str" para ver la estructura de los datos:
```{r}
str(Actividades)
str(Proyecciones)
```
La función "summary" nos permite ver más detalles:
```{r}
summary(Actividades)
summary(Proyecciones)
```
## Armonizar formatos de tablas
Las tablas incluyen datos sobre número de actividades y proyecciones poblacionales a nivel de departamento.
<br>
**Sin embargo, los dos formatos son diferentes!**
<br>
- En Proyecciones, cada departamento es una fila única;
- En Actividades, los departamentos se repiten para cada organización y cada sector.
<br>
Para conducir un análisis, queremos **armonizar los formatos y juntar las tablas.**
## Antes de empezar: cómo hacer un tubo!
Ya conocemos cómo formular las funciones de R y asignar el valor a una nueva variable:
```{r}
Var2 <- función(Var1)
```
Pero cómo hacemos si queremos aplicar dos o más funciones consecutivas a una variable?
```{r}
Var2 <- función1(Var1)
Var3 <- función2(Var2)
Var3 <- función3(Var3)
...
```
Esto es un proceso **ineficiente**.
## Antes de empezar: cómo hacer un tubo! (2)
El paquete "dplyr" del tidyverse nos permite juntar nuestros pasos a través del operador " %>% ":
```{r}
Var2 <- Var1 %>%
función1 %>%
función2
```
{ width=50% }
Pueden añadir este operador con ctrl + shift + M.
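Un ejemplo concreto con nuestra tabla de actividades, usando funciones que ya conocemos (es solo ilustrativo):
```{r, eval=FALSE}
# sin tubo: funciones anidadas, se leen de adentro hacia afuera
summary(head(Actividades))

# con tubo: los pasos se leen en el orden en que ocurren
Actividades %>%
  head() %>%
  summary()
```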
## Sumar y agregar datos
Primero, queremos agregar los datos a nivel de sector y departamento (sin considerar las organizaciones).
Para sumar los valores, utilizamos la función "summarise":
{ width=50% }
## Sumar y agregar datos (2)
En nuestro script:
```{r}
Actividades_Agregadas <- Actividades %>%
group_by(Departamento, Sector) %>%
summarise(Actividades_sum = sum(Actividades))
```
Hemos creado una nueva tabla "Actividades_Agregadas" con valores sumados para cada departamento y sector!
## Filtrar valores
En mi tabla, hay valores para actividades nacionales, donde no se especifican los departamentos.
Para nuestro ejercicio, no queremos considerar estas actividades porque queremos comparar los departamentos.
Los podemos filtrar a través de la función "filter":
{ width=50% }
<br>
En nuestro script:
```{r}
Actividades_Agregadas <- filter(Actividades_Agregadas, Departamento != "Nacional")
```
## Cambiar estructura de la tabla
Aún no podemos comparar bien nuestras tablas. Queremos una fila única para cada departamento!
La función "spread" nos permite hacer justamente esto:
{ width=80% }
En nuestro script:
```{r}
Actividades_Agregadas <- Actividades_Agregadas %>%
spread(Sector, Actividades_sum)
```
Ahora nuestra tabla tiene una fila para cada departamento.
## Seleccionar variables
Nuestra nueva tabla "Actividades_Agregadas" contiene columnas para cada sector. Podemos seleccionar las columnas que nos interesan a través de la función "select":
{ width=40% }
## Seleccionar variables
Para hacer nuestro análisis más sencillo, vamos a enfocarnos solamente sobre los sectores de **Protección** e **Integración**:
```{r}
Actividades_Agregadas <- select(Actividades_Agregadas, Departamento, Protección, Integración)
```
## Añadir nuevas columnas
Antes de combinar nuestras tablas, queremos tener una columna con el **total de actividades en cada departamento**.
La función "mutate" nos permite hacer esto:
{ width=60% }
<br>
En nuestro script:
```{r}
Actividades_Agregadas <- Actividades_Agregadas %>%
replace(is.na(.), 0) %>%
mutate(Sum_Act = Protección + Integración)
```
## By the way..
Gracias al operador " %>% ", todos estos pasos pueden ser hechos conjuntamente!
```{r}
Actividades_Agregadas <- Actividades %>%
group_by(Departamento, Sector) %>%
summarise(Actividades_sum = sum(Actividades)) %>%
spread(Sector, Actividades_sum) %>%
replace(is.na(.), 0) %>%
mutate(Sum_Act = Protección + Integración)
```
{ width=20% }
## Combinar tablas
Para hacer nuestro análisis, queremos juntar nuestras dos tablas en una.
R nos permite hacer esto gracias a la función "join".
Hay cuatro tipos de join:

## Combinar tablas
Para nuestra tabla, vamos a utilizar la función "left_join":
```{r}
colnames(Proyecciones)[1] <- "Departamento"
RMRP_Datos <- left_join(Proyecciones, Actividades_Agregadas, by = "Departamento")
```
Luego podemos verificar que todos los datos sean combinados:
```{r}
summary(RMRP_Datos)
summary(Actividades_Agregadas)
```
## Revisiones finales
Dado que nuestro análisis va a ser solo sobre Protección e Integración, no necesitamos otros sectores:
```{r}
RMRP_Datos <- select(RMRP_Datos, Departamento, contains("Proyeccion"), contains("Protección"),
contains("Integración"), Sum_Act)
```
## Para recapitular
- **Importar** datos: read_excel
- **Sumar y agregar** datos: summarise
- **Filtrar** valores: filter
- **Seleccionar** variables: select
- **Cambiar formato** de datos (ancho-largo): spread
- **Crear** nuevas variables: mutate
- **Combinar** tablas: join
<file_sep>---
title: "05-EJ-manipulacionII"
output: html_document
---
```{r setup, include=FALSE}
# este bloque de codigo setea nuestra sesion y carga tidyverse
knitr::opts_chunk$set(echo = TRUE)
library(tidyverse)
```
# Manipulacion II
En esta libreta practicamos las funciones de `tidyr`.
## Primero cargamos los datos
Chequea que te encuentres en el proyecto correcto y que tus datasets estén ubicados en la carpeta `data` contenida en este proyecto.
```{r}
decisiones <- read_csv("data/decisiones_asilo_peru.csv")
decisiones
```
```{r}
paises_largo <- read_csv("data/paises_largo.csv")
paises_largo
```
## Tu turno 1: `pivot_wider`
Convertir paises_largo al formato ordenado (variables como columnas)
```{r, message = FALSE}
paises_largo_tidy <- pivot_wider(paises_largo,
names_from = variable,
values_from = valor)
paises_largo_tidy
```
## Tu turno 2: para practicar después
Nos interesa anexar la información de la esperanza de vida de los países de origen a la tabla de decisiones: Unir la tabla de países obtenida en Tu turno 1 (paises_largo_tidy) con la tabla de decisiones de asilo (decisiones). Utiliza como llave las columnas con el nombre del país y el año.
```{r}
# no llegamos a verlo
```
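A modo de referencia, un posible esquema de solución; los nombres de las columnas llave usados en `by` (la columna con el nombre del país en `decisiones`, y `pais`/`anio` en `paises_largo_tidy`) son supuestos y hay que reemplazarlos por los nombres reales de tus tablas:

```{r, eval=FALSE}
# esquema: los nombres de columnas dentro de `by` son supuestos
decisiones_ampliado <- left_join(
  decisiones,
  paises_largo_tidy,
  by = c("Pais Origen" = "pais", "Anio" = "anio")
)
decisiones_ampliado
```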
<file_sep>---
title: "Lectura de datos"
date: "`r Sys.Date()`"
output:
xaringan::moon_reader:
css: ["default", "default-fonts", "custom.css"]
mathjax: "https://cdn.bootcss.com/mathjax/2.7.1/MathJax.js?config=TeX-MML-AM_HTMLorMML"
lib_dir: libs
seal: false
nature:
ratio: 13:9
highlightStyle: github
highlightLines: true
countIncrementalSlides: false
---
```{r setup, include=FALSE}
options(htmltools.dir.version = FALSE)
knitr::opts_chunk$set(fig.height=5)
library(ggplot2)
library(readr)
library(here)
library(dplyr)
library(colorspace)
paleta <- rainbow_hcl(10)
print_it_rainbow <- function(palabra, paleta) {
i_font = 1
for (char in palabra) {
if (char != " ") {
cat(paste0("<font style='color:", paleta[i_font] , "'>",char,"</font>"))
i_font = i_font %% length(paleta) + 1
} else {
cat(" ")
}
}
}
```
background-color: #272822
<br>
<br>
<br>
<br>
<br>
<br>
```{r, results='asis', echo=FALSE}
cat("<h1 style='color:white'> <center> R para contextos humanitarios de emergencia")
cat("</center></h1> \n")
cat("## <center>")
palabra = unlist(strsplit("Lectura de datos", split = ""))
cat(print_it_rainbow(palabra, paleta))
cat("</center> \n")
cat("### <center>")
fecha = as.character(Sys.Date())
palabra = unlist(strsplit("<NAME>", split = ""))
cat(print_it_rainbow(palabra, paleta))
cat("</center>")
```
---
<div class="my-header"></div>
## Lectura de datos
En general, el primer paso de nuestro flujo de trabajo es importar los datos a R
Una vez importados los datos, podremos:
- Modificarlos
- Graficarlos
- Modelarlos
- Comunicar nuestros resultados
Los tipos de archivos de almacenamiento de datos más comunes son:
- Excel
- CSV (Comma Separated Values)
También existen
- json
- XML, etc.
---
<div class="my-header"></div>
## Lectura de datos CSV
File > Import Dataset > From Text...(readr)
<center><img src="img/importar-asilo.png" height="450"></center>
---
<div class="my-header"></div>
## Lectura de datos CSV - Opciones
2. Seleccionamos el archivo deseado
3. Si es CSV, debe decir Delimiter: Comma
4. También prestar atención donde dice First Row as Names
5. Si hay filas que hay que saltear (que no sean los nombres), podemos usar Skip
6. RStudio adivina el tipo de la columna en base a sus primeros valores, podemos cambiar el tipo si está equivocado
7. Importante!: Cuando terminamos de elegir las opciones, copiamos el código que se encuentra en el rectángulo `Code Preview`
8. Hacer click en importar
Una vez que importamos datos vamos a tener disponible un dataset del tipo `tibble` en la mesada
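El código que copia RStudio desde `Code Preview` se ve más o menos así (la ruta y el nombre del objeto dependen de tu archivo):
```{r, eval=FALSE}
library(readr)
decisiones_asilo_peru <- read_csv("data/decisiones_asilo_peru.csv")
View(decisiones_asilo_peru)
```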
---
<div class="my-header"></div>
## Lectura de datos
La última línea de código que copiamos es
> `View(nombre_dataset)`
Ese comando sirve para visualizar las filas y columnas del dataset. También si corremos
> `nombre_dataset`
veremos en la consola (o justo debajo del bloque de código si es un RMarkdown) algunas pocas filas del dataset.
El comando `str(nombre_dataset)` devuelve una descripción general del dataset. Algunos campos que nos devuelve el comando son:
- Tipo de dato (tibble)
- Tamaño ([filas x columnas])
- Nombre y tipo de las columnas, etc.
Posibles tipos de columnas: texto (`character`), números (`double`, `numeric`, `integer`), fechas (`Date`, `DateTime`), lógicos (`logical`), categóricas (`factor`), etc.
---
<div class="my-header"></div>
## Tu turno: Lectura de datos CSV
1. Importar el archivo "decisiones_asilo_peru.csv" (Fuente: UNHCR) con información sobre las decisiones tomadas en Perú frente a los pedidos de asilo recibidos provenientes de distintos países
2. Indicar qué cantidad de filas y columnas tiene
---
<div class="my-header"></div>
## Lectura de datos de Excel
File > Import Dataset > From Excel
<center><img src="img/importar-haiti.png" height="450"></center>
---
<div class="my-header"></div>
## Lectura de datos de Excel
- El paquete utilizado es `readxl`
**Importante!**
- Número de página donde se encuentra el dataset
- Rango de celdas donde se encuentra
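Un ejemplo mínimo con `readxl` (la ruta, la hoja y el rango son valores de ejemplo que hay que ajustar a tu archivo):
```{r, eval=FALSE}
library(readxl)
haiti <- read_excel("data/dtm-haiti-site-assessment-round-33.xlsx",
                    sheet = 1,          # numero o nombre de la hoja
                    range = "A1:R200")  # rango de celdas (opcional)
```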
---
background-color: #272822
<br>
<br>
<br>
<br>
<br>
<br>
```{r, results='asis', echo=FALSE}
cat("# <center>")
palabra = unlist(strsplit("Desafío 1: Lectura de datos ", split = ""))
cat(print_it_rainbow(palabra, paleta))
cat("</center> \n")
```
---
<div class="my-header"></div>
## DESAFÍO 1: Lectura de datos
El objetivo de este desafío es que armes un proyecto y leas un archivo de datos para aplicar lo que aprendas en el resto del curso.
1. Crea un proyecto de RStudio llamado datos-haiti
2. Crea un archivo de RMarkdown (por las dudas, esta es la sección asociada).
3. Arma un chunk para leer los datos. Importa el archivo disponible en https://data.humdata.org/dataset/27baf423-2d13-49a4-bd72-cb65d8f924da/resource/9f71ab56-8a9a-4df2-be5d-f26d3600a285/download/dtm-haiti-site-assessment-round-33.xlsx con información sobre distintos campamentos en Haití (Fuente: OIM).
4. Además de poner un título informativo, describe los datos con tus palabras. ¿Cuál es la fuente? ¿Qué variables incluyen? ¿Cuántas observaciones tiene? ¿Qué tipo de datos tiene cada columna?
---
<div class="my-header"></div>
## Observaciones sobre el desafío:
- Notar que se puede importar directo usando el url
- Observa que la segunda fila no debería aparecer como una observación del dataset. En el futuro veremos cómo quitarla
- Esta fila arruina la detección del tipo de todas las columnas numéricas
- Arreglar el tipo de las columnas
---
<div class="my-header"></div>
## Escritura de datos
Les presento las cheatsheets o guías rápidas (en español) `r emo::ji("star")`
[https://rstudio.com/resources/cheatsheets/](https://rstudio.com/resources/cheatsheets/)
Nos describen brevemente la mayoria de las posibilidades que nos brinda un paquete `r emo::ji("gift")`
---
<div class="my-header"></div>
## Escritura de datos
Con `readr` puedo escribir archivos csv o de texto
**Tu turno:** Con la ayuda de la guía rápida de `readr`, guarda el dataset de Haití con un nombre distinto en la carpeta `data` (más abajo hay una posible solución)
Para escribir archivos Excel puedes usar, por ejemplo, los paquetes `writexl` u `openxlsx`
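Una posible solución para el ejercicio (el nombre del archivo es de ejemplo):
```{r, eval=FALSE}
library(readr)
write_csv(haiti, "data/haiti_copia.csv")
```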
---
<div class="my-header"></div>
## Escritura de datos
En este módulo aprendimos a
- leer datos en R
- en formato csv
- en formato .xls o .xlsx
- escribir datos en R
---
<div class="my-header"></div>
## Licencia y material usado
Licencia: [Creative Commons Attribution-ShareAlike 4.0 International License](https://creativecommons.org/licenses/by-sa/4.0/deed.es_ES).
Este material está inspirado y utiliza explicaciones de:
- [R para Clima](https://eliocamp.github.io/r-clima/) de <NAME> y <NAME>
- [Master the Tidyverse](https://github.com/rstudio-education/master-the-tidyverse-instructors) de <NAME>
<file_sep>---
title: "Intro"
date: "`r Sys.Date()`"
output:
xaringan::moon_reader:
css: ["default", "default-fonts", "custom.css"]
mathjax: "https://cdn.bootcss.com/mathjax/2.7.1/MathJax.js?config=TeX-MML-AM_HTMLorMML"
lib_dir: libs
seal: false
nature:
ratio: 13:9
highlightStyle: github
highlightLines: true
countIncrementalSlides: false
---
```{r setup, include=FALSE}
options(htmltools.dir.version = FALSE)
knitr::opts_chunk$set(fig.height=5)
library(ggplot2)
library(readr)
library(here)
library(dplyr)
library(colorspace)
paleta <- rainbow_hcl(10)
print_it_rainbow <- function(palabra, paleta) {
i_font = 1
for (char in palabra) {
if (char != " ") {
cat(paste0("<font style='color:", paleta[i_font] , "'>",char,"</font>"))
i_font = i_font %% length(paleta) + 1
} else {
cat(" ")
}
}
}
```
background-color: #272822
<br>
<br>
<br>
<br>
<br>
<br>
```{r, results='asis', echo=FALSE}
cat("<h1 style='color:white'> <center> R para contextos humanitarios de emergencia")
cat("</center></h1> \n")
cat("## <center>")
palabra = unlist(strsplit("Introducción", split = ""))
cat(print_it_rainbow(palabra, paleta))
cat("</center> \n")
cat("### <center>")
fecha = as.character(Sys.Date())
palabra = unlist(strsplit("<NAME>", split = ""))
cat(print_it_rainbow(palabra, paleta))
cat("</center>")
```
---
<div class="my-header"></div>
```{r, results='asis', echo=FALSE}
cat("##")
palabra = unlist(strsplit("Hola!", split = ""))
cat(print_it_rainbow(palabra, paleta))
cat("\n")
```
- Me llamo **Violeta**
- Soy **Matemática** e **Instructora Certificada por RStudio**
- Me gusta **visualizar datos, visitar museos, tomar té, etc, etc.**
<center>
<img src="img/grafico.jpg" alt="Hex stickers" width="250">
<img src="img/te.jpg" alt="Hex stickers" width="280">
</center>
<center>
<img src="img/museo.jpg" alt="Hex stickers" width="200">
</center>
---
<div class="my-header"></div>
## Objetivos del taller
- Entender qué son y para qué sirven R y RStudio
- Saber buscar ayuda en el mundo R
- Poder hacer un análisis de datos sencillo y completo, todo en R
- Poder aprovechar R para introducirlo en su flujo de trabajo habitual
## Modalidad del taller
- Presentación de los temas con ejemplos
- Ejercicios intercalados para interactuar
- "Live coding"
## Requisitos
- Asumimos que ya tienen instalado R y RStudio
- En caso contrario: seguir los pasos en este video
---
<div class="my-header"></div>
## Programa tentativo
<style type="text/css">
.tg {border-collapse:collapse;border-spacing:0;}
.tg td{border-color:black;border-style:solid;border-width:1px;font-family:Arial, sans-serif;font-size:14px;
overflow:hidden;padding:10px 5px;word-break:normal;}
.tg th{border-color:black;border-style:solid;border-width:1px;font-family:Arial, sans-serif;font-size:14px;
font-weight:normal;overflow:hidden;padding:10px 5px;word-break:normal;}
.tg .tg-jass{background-color:#656565;border-color:#9b9b9b;color:#FFF;font-weight:bold;text-align:center;vertical-align:top}
.tg .tg-3gal{background-color:#656565;border-color:#9b9b9b;color:#FFF;text-align:center;vertical-align:top}
.tg .tg-meum{background-color:#656565;border-color:#9b9b9b;font-weight:bold;text-align:center;vertical-align:top}
.tg .tg-fz0b{background-color:#FFF;border-color:#9b9b9b;color:#343434;text-align:center;vertical-align:top}
.tg .tg-gzjo{background-color:#FFF;border-color:#9b9b9b;text-align:left;vertical-align:top}
.tg .tg-o92t{background-color:#656565;border-color:#9b9b9b;text-align:left;vertical-align:top}
</style>
<table class="tg">
<thead>
<tr>
<th class="tg-meum"></th>
<th class="tg-jass">Día 1 (25/11)</th>
<th class="tg-jass">Día 2 (26/11)</th>
<th class="tg-jass">Día 3 (27/11)</th>
</tr>
</thead>
<tbody>
<tr>
<td class="tg-3gal">09:00-10:00</td>
<td class="tg-fz0b">Intro al curso</td>
<td class="tg-fz0b">Visualización de datos</td>
<td class="tg-gzjo" rowspan="6"><br><br><br><span style="color:#343434">- Paquete koboloader </span><br><span style="color:#343434">- QualMiner: Análisis de texto </span><br><span style="color:#343434">- Tratamiento y procesamiento de datos - 5W</span></td>
</tr>
<tr>
<td class="tg-3gal">10:00-10:05</td>
<td class="tg-3gal">Pausa</td>
<td class="tg-3gal">Pausa</td>
</tr>
<tr>
<td class="tg-3gal">10:05-10:55</td>
<td class="tg-fz0b">Intro a R y RStudio</td>
<td class="tg-fz0b">Visualización + Manipulación</td>
</tr>
<tr>
<td class="tg-3gal">10:55-11:10</td>
<td class="tg-3gal">Pausa</td>
<td class="tg-3gal">Pausa</td>
</tr>
<tr>
<td class="tg-3gal">11:10-11:40</td>
<td class="tg-fz0b">RStudio y proyectos</td>
<td class="tg-fz0b">Más manipulación y funciones</td>
</tr>
<tr>
<td class="tg-3gal">11:40-11:45</td>
<td class="tg-3gal">Pausa</td>
<td class="tg-3gal">Pausa</td>
</tr>
<tr>
<td class="tg-3gal">11:45-13:00</td>
<td class="tg-fz0b"> RMarkdown</td>
<td class="tg-fz0b">Interactuar con Kobo y Power BI</td>
<td class="tg-o92t"></td>
</tr>
<tr>
<td class="tg-3gal">13:00-14:00</td>
<td class="tg-3gal">Almuerzo</td>
<td class="tg-3gal">Almuerzo</td>
<td class="tg-o92t"></td>
</tr>
<tr>
<td class="tg-3gal">14:00-15:00</td>
<td class="tg-fz0b">Lectura de datos</td>
<td class="tg-fz0b">Gráficos, reportes y tablas</td>
<td class="tg-o92t"></td>
</tr>
<tr>
<td class="tg-3gal">15:00-16:00</td>
<td class="tg-fz0b">Manipulación de datos</td>
<td class="tg-fz0b">Gráficos, reportes y tablas</td>
<td class="tg-o92t"></td>
</tr>
</tbody>
</table>
---
<div class="my-header"></div>
## Desafíos de hoy
- Leer correctamente una base de datos
- Generar nuevas columnas a partir de información de otras columnas y generar resúmenes a partir de los datos
- Generar gráficos a partir de la base de datos usada
---
<div class="my-header"></div>
```{r xaringan-editable, echo=FALSE}
xaringanExtra::use_editable(expires = 1)
```
```{r, results='asis', echo=FALSE}
cat("## ")
palabra = unlist(strsplit("Qué herramientas para análisis de datos utilizan?", split = ""))
cat(print_it_rainbow(palabra, paleta))
```
.flex[
.can-edit[
- Excel
- GIS
- Power BI
- Word
]
]
.flex[
.w-50[
**Ventajas**
.can-edit.key-rstudio-gets-me[
- Intuitivos para usar
]
]
.w-50[
**Desventajas**
.can-edit.key-rstudio-gets-me-not[
- Son muy específicos para cada tarea
]
]
]
---
<div class="my-header"></div>
## Si ya tenemos:
- Excel
- Kobo
- GIS
- Power BI
- Word
- InDesign
- Ilustrator
## Entonces por qué R?
## Porque con R podemos:
- Realizar la mayoría de esas tareas
- Automatizar procesos
- Interactuar fácilmente con muchas herramientas
Y además... es abierto y gratuito
---
```{r, results='asis', echo=FALSE}
cat("## ")
palabra = unlist(strsplit("Desventaja", split = ""))
cat(print_it_rainbow(palabra, paleta))
```
<center><img src="img/anteseratipo.png" alt="Hex stickers" height="510"></center>
---
<div class="my-header"></div>
## Algunas cosas que podemos hacer en R
- Leer datos
- Transformar y resumir datos
- Visualizar datos
- Generar reportes automáticos
- Crear sitios web. p. ej.: https://violetr.netlify.app/
- Desarrollar aplicaciones interactivas:
- https://shiny.rstudio.com/gallery/india-blood-banks.html
- https://violetr.shinyapps.io/hiv_hnp/
- https://vac-lshtm.shinyapps.io/ncov_tracker
---
<div class="my-header"></div>
## R en contextos humanitarios de emergencia
- Hay un grupo de usuarios y desarrolladores de R que trabajan en esta área:
[HumanitaRian useR group](https://humanitarian-user-group.github.io/about/)
- Creado en una reunión de la UNHCR
- Blog y grupo de Skype
- Documentación y Reproducibilidad de los procesos
- Hay funciones específicamente hechas para manejar este tipo de datos y conectarse con herramientas populares como Kobo
- Existen manuales de buenas prácticas sobre cómo crear reportes. Por ejemplo, [How to quickly produce statistical reports?
The UNHCR “R-Cookbook”](https://edouard-legoupil.github.io/unhcr_paged/)
---
<div class="my-header"></div>
## Material del curso
El material del taller está disponible en
.flex[
.can-edit[
- url
]
]
Licencia: [Creative Commons Attribution-ShareAlike 4.0 International License](https://creativecommons.org/licenses/by-sa/4.0/deed.es_ES).
Este material está inspirado en:
- [R para Clima](https://eliocamp.github.io/r-clima/) de <NAME> y <NAME>
- [Master the Tidyverse](https://github.com/rstudio-education/master-the-tidyverse-instructors) de <NAME>
---
background-color: #272822
<br>
<br>
<br>
<br>
<br>
<br>
```{r, results='asis', echo=FALSE}
cat("# <center>")
palabra = unlist(strsplit("Empezamos!", split = ""))
cat(print_it_rainbow(palabra, paleta))
cat("</center>")
```
---
<div class="my-header"></div>
## Qué es R? Qué es RStudio?
- R es el lenguaje de programación, es la forma de darle instrucciones a la computadora
- RStudio es la interfaz gráfica (lo que vemos)

---
<div class="my-header"></div>
## RStudio: Como se ve la primera vez
A la izquierda, en la consola, podemos escribir código en R
Este se ejecutará al apretar `Enter`

---
<div class="my-header"></div>
## RStudio: Como se ve habitualmente
Vamos a hacer cosas interesantes, y vamos a querer guardarlas
Por eso la configuración normal de RStudio será asi, con cuatro paneles
El de arriba a la izquierda es un bloc de notas

---
<div class="my-header"></div>
## RStudio culinario

<p style="color: gray; font-size:15px"> Imagen del curso "R para Clima" - <NAME> y <NAME> <br>
https://eliocamp.github.io/r-clima/index.html
</p>
---
<div class="my-header"></div>
## Cómo le damos instrucciones a R?
Como ya vieron en el video para instalar R, podemos pedirle a R que compute operaciones básicas.
Y también no tan básicas.
```{r}
2 + 2
```
```{r}
sqrt(2)
```
Esto solo nos devuelve el valor
Pero si queremos guardarlo en algún lugar (para que quede disponible en la mesada):
```{r}
raiz_dos <- sqrt(2)
raiz_dos
```
---
<div class="my-header"></div>
## Variables en R
```{r}
raiz_dos <- sqrt(2)
raiz_dos
```
**En Excel:**
Es parecido a hacer =SQRT(2) (o =RCUAD(2)) en la celda A2
Sabemos que ese resultado está en la celda A2
**En R:**
Ese resultado está en la variable `raiz_dos` que aparece en el environment (la mesada)
**Pero... La analogía no es 100% útil:**
[https://PollEv.com/multiple_choice_polls/P2xOvcUGbMWv91CLwX6o1/respond](https://PollEv.com/multiple_choice_polls/P2xOvcUGbMWv91CLwX6o1/respond)
```{r}
x <- 2
y <- x + 2
x <- 3
```
---
<div class="my-header"></div>
## Vectores en R
- Es la unidad básica en R
- Casi todo es un vector (por eso vemos el [1] a la izquierda en los resultados)
- Todos los elementos de un vector son de un mismo tipo (numérico, caracter, etc)
```{r}
vector1 <- c(3, 5, 6.1, 7)
vector1
```
```{r}
vector2 <- 1:20
vector2
```
Puedo calcular el promedio de los elementos de un vector
```{r}
mean(vector2)
```
---
<div class="my-header"></div>
## Vectores en R
```{r}
vector1 <- c(3, 5, 6.1, 7)
vector1
```
Puedo acceder al elemento que está en el lugar 2 del vector
```{r}
vector1[2]
```
Puedo obtener todos los valores de un vector salvo el elemento del lugar 2 asi:
```{r}
vector1[-2]
```
---
<div class="my-header"></div>
## Listas en R
¿Qué pasa si quiero guardar más de un tipo de elemento en una variable?
¡Listas! Se crean con `list()` y pueden contener cualquier tipo de datos, incluso otras listas
```{r}
mi_agenda <- list(Carlos = c(tel = 41232342),
Maria = c(tel = 25362819, edad = 25))
mi_agenda$Carlos
mi_agenda[[2]]
```
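Como ejemplo ilustrativo adicional, también podemos acceder a un valor dentro de un elemento de la lista, o usar el nombre entre dobles corchetes:
```{r}
mi_agenda$Maria["edad"]
mi_agenda[["Carlos"]]
```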
---
<div class="my-header"></div>
## Funciones en R
R tiene funciones útiles incorporadas directamente
> `sqrt`, `+`, `-`, `mean`, `sum`, etc
Se aplican a una o más variables y devuelven un valor
Las entradas de las funciones (pueden ser 1 o más) se llaman parámetros
Pueden ser obligatorios u opcionales
> `sqrt` se aplica a un número positivo y devuelve su raíz cuadrada
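A modo de ejemplo ilustrativo de un parámetro opcional: `mean` acepta `na.rm` para ignorar datos faltantes.
```{r}
mean(c(1, NA, 3))               # devuelve NA porque hay un dato faltante
mean(c(1, NA, 3), na.rm = TRUE) # devuelve 2
```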
Pero... nuestro objetivo es **realizar análisis de datos**
`r emo::ji("star")` **necesitamos más funciones!** `r emo::ji("star")`
Tenemos acceso a más funciones que las básicas a partir de **paquetes** `r emo::ji("gift")[1]`
---
<div class="my-header"></div>
## Hay muuuuchos paquetes de R
- De distintas áreas del conocimiento:
- Genética,
- Ciencia de datos en general,
- R humanitario, etc.
- Podemos hacer nuestro propio paquete para nuestro flujo de trabajo
<center><img src="img/aushex.png" alt="Hex stickers" height="320"></center>
---
<div class="my-header"></div>
## Ciencia de datos
Flujo de trabajo:

Existe un **conjunto de paquetes** para realizar estas tareas
<center><img src="img/hex-tidyverse.png" alt="Hex stickers" height="150"></center>
<p style="color: gray; font-size:13px"> Imágenes de "R para Ciencia de Datos" y Rstudio</p>
---
<div class="my-header"></div>
## `tidyverse`
- tidy = ordenado
- ...verse = universo
<center><img src="img/tidyhexall.png" alt="Hex stickers" height="360s"></center>
---
<div class="my-header"></div>
## Instalar paquetes en R
Para instalar paquetes, usamos la función `install.packages()`
```{r eval=FALSE}
install.packages("readr")
```
instala un paquete que contiene funciones para leer datos.
Usando el comando
```{r, eval=FALSE}
library(readr)
```
Las funciones del paquete `readr` estarán disponibles. p. ej.: `read_csv`
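Como nota adicional: también se puede usar una función puntual sin cargar todo el paquete, con el operador `::` (el nombre de archivo es hipotético):
```{r, eval=FALSE}
readr::read_csv("archivo.csv")
```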
**Tu turno:** Instalar el paquete `readr`
**Tu turno Bis:** Instalar `tidyverse`
---
<div class="my-header"></div>
## Pedir ayuda en R
Hay muchas formas de pedir ayuda en R
Una opción es usar la **ayuda de R**
```{r, eval=FALSE}
?nombre_funcion
```
- `Description`: breve descripción de la función
- `Usage`
- `Arguments`: argumentos de la función
- `Details`
- `Value`: lo que devuelve la función
- `Examples`
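Por ejemplo (esquema ilustrativo):
```{r, eval=FALSE}
?mean           # ayuda de una función concreta
help("mean")    # equivalente a lo anterior
??"deviation"   # búsqueda por palabra clave en toda la ayuda instalada
```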
**Tu turno:** Abre y lee la ayuda de la función `sd()`. Puede que haya cosas que aún no entiendas, pero trata de captar la idea general. ¿Qué hace esa función? ¿Qué argumentos acepta?
---
<div class="my-header"></div>
## Pedir ayuda en R
Otra opción es **preguntar a instructor@ o colegas**
Otra, **googlear**
Fórmula "óptima" (para mi) para googlear:
> R + pregunta o error descriptivo y específico
o, si estamos usando tidyverse
> tidyverse + pregunta o error descriptivo y específico
Es mejor buscar en inglés (hay más material)
Algunas buenas fuentes:
- stackoverflow (Preguntas y respuestas)
- rstudio community (Foro)
---
<div class="my-header"></div>
## Material del curso
El material del taller está disponible en
.flex[
.can-edit[
- url
]
]
---
<div class="my-header"></div>
## Licencia y materiales
Licencia: [Creative Commons Attribution-ShareAlike 4.0 International License](https://creativecommons.org/licenses/by-sa/4.0/deed.es_ES).
Este material está inspirado y utiliza explicaciones de:
- [R para Clima](https://eliocamp.github.io/r-clima/) de <NAME> y <NAME>
- [Master the Tidyverse](https://github.com/rstudio-education/master-the-tidyverse-instructors) de <NAME>
<file_sep>---
title: "Intro"
date: "`r Sys.Date()`"
output:
xaringan::moon_reader:
css: ["default", "default-fonts", "custom.css"]
mathjax: "https://cdn.bootcss.com/mathjax/2.7.1/MathJax.js?config=TeX-MML-AM_HTMLorMML"
lib_dir: libs
seal: false
nature:
ratio: 13:9
highlightStyle: github
highlightLines: true
countIncrementalSlides: false
---
```{r setup, include=FALSE}
options(htmltools.dir.version = FALSE)
knitr::opts_chunk$set(fig.height=5)
library(ggplot2)
library(readr)
library(here)
library(dplyr)
library(colorspace)
paleta <- rainbow_hcl(10)
print_it_rainbow <- function(palabra, paleta) {
i_font = 1
for (char in palabra) {
if (char != " ") {
cat(paste0("<font style='color:", paleta[i_font] , "'>",char,"</font>"))
i_font = i_font %% length(paleta) + 1
} else {
cat(" ")
}
}
}
```
background-color: #272822
<br>
<br>
<br>
<br>
<br>
<br>
```{r, results='asis', echo=FALSE}
cat("<h1 style='color:white'> <center> R para contextos humanitarios de emergencia")
cat("</center></h1> \n")
cat("## <center>")
palabra = unlist(strsplit("Reportes paramétricos y más", split = ""))
cat(print_it_rainbow(palabra, paleta))
cat("</center> \n")
cat("### <center>")
fecha = as.character(Sys.Date())
palabra = unlist(strsplit("<NAME>", split = ""))
cat(print_it_rainbow(palabra, paleta))
cat("</center>")
```
---
<div class="my-header"></div>
## Personalizando reportes
Ya sabemos lo básico sobre generar reportes.
- RMarkdown y sus elementos
- bloques de codigo (chunks)
- texto
- encabezado (hasta ahora no lo tocamos)
- Tipo de output
- Knit para generar
Pero ya vimos que las salidas por ahora no son muy lindas
En este módulo vamos a ver cómo personalizarlos para que sean publicables y más eficientes.
---
<div class="my-header"></div>
## Agregar tabla de contenidos
Por ejemplo, si quiero agregar una tabla de contenidos con los títulos y subtítulos, puedo agregar en el encabezado la opción `toc: TRUE`
```{r, eval = FALSE}
---
output:
html_document:
toc: TRUE
---
```
¡Es importante que respetemos la estructura tal como se indica! (los espacios y la alineación de `toc` con respecto a `html_document`)
```{r, eval = FALSE}
---
output:
word_document:
toc: TRUE
---
```
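Otras opciones frecuentes del encabezado, a modo de esquema ilustrativo: `toc_depth` controla hasta qué nivel de título llega la tabla de contenidos y `number_sections` numera las secciones en la salida HTML.
```{r, eval = FALSE}
---
output:
  html_document:
    toc: TRUE
    toc_depth: 2
    number_sections: TRUE
---
```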
---
<div class="my-header"></div>
## Automatizando reportes
Es muy común tener que hacer un reporte cuyo resultado dependa de ciertos parámetros.
Por ejemplo, podrías tener un reporte que analiza las decisiones a los pedidos de asilo de personas colombianas en Perú.
```{r, message = FALSE, eval=FALSE}
library(tidyverse)
decisiones <- read_csv("data/decisiones_asilo_peru.csv")
decisiones_filtrado <- decisiones %>%
filter(`Codigo Pais Origen` == "COL")
decisiones_filtrado %>%
ggplot(aes(Anio, Reconocidas)) +
geom_line(color = "red", size = 1.5)
```
---
<div class="my-header"></div>
## Automatizando reportes
Es muy común tener que hacer un reporte cuyo resultado dependa de ciertos parámetros.
Por ejemplo, podrías tener un reporte que analiza las decisiones a los pedidos de asilo de personas colombianas en Perú.
```{r, message = FALSE, echo=FALSE}
library(tidyverse)
decisiones <- read_csv("data/decisiones_asilo_peru.csv")
decisiones_filtrado <- decisiones %>%
filter(`Codigo Pais Origen` == "COL")
decisiones_filtrado %>%
ggplot(aes(Anio, Reconocidas)) +
geom_line(color = "red", size = 1.5)
```
---
<div class="my-header"></div>
## Automatizando reportes
Si ahora querés hacer el mismo reporte pero para Haiti, tienes que abrir el archivo y modificar la llamada a `filter` para quedarte sólo con ese país:
```{r, message=FALSE, eval = FALSE}
library(tidyverse)
decisiones <- read_csv("data/decisiones_asilo_peru.csv")
decisiones_filtrado <- decisiones %>%
filter(`Codigo Pais Origen` == "HTI")
decisiones_filtrado %>%
ggplot(aes(Anio, Reconocidas)) +
geom_line(color = "red", size = 1.5)
```
Si el reporte es largo y usa el nombre del país en múltiples lugares cambiar "COL" por "HTI" puede ser tedioso y propenso a error, ya que te obliga a modificar muchas partes del código.
---
<div class="my-header"></div>
## Automatizando reportes
Si ahora querés hacer el mismo reporte pero para Haiti, tienes que abrir el archivo y modificar la llamada a `filter` para quedarte sólo con ese país:
```{r, message=FALSE, echo=FALSE}
library(tidyverse)
decisiones <- read_csv("data/decisiones_asilo_peru.csv")
decisiones_filtrado <- decisiones %>%
filter(`Codigo Pais Origen` == "HTI")
decisiones_filtrado %>%
ggplot(aes(Anio, Reconocidas)) +
geom_line(color = "red", size = 1.5)
```
Si el reporte es largo y usa el nombre del país en múltiples lugares cambiar "COL" por "HTI" puede ser tedioso y propenso a error, ya que te obliga a modificar muchas partes del código.
---
<div class="my-header"></div>
## Parametrizando reportes
En estas situaciones podés crear un reporte parametrizado. La idea es que el reporte tiene una serie de parámetros que pueden modificar la salida. ¡Es como si el archivo de R Markdown fuera una gran función con sus argumentos!
Para generar un reporte parametrizado hay que agregar un elemento llamado `params` al encabezado, con la lista de parámetros y sus valores por defecto.
```{r, eval=FALSE}
params:
pais: COL
```
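También pueden definirse varios parámetros con sus valores por defecto (boceto ilustrativo; el parámetro `anio` es hipotético y no forma parte del reporte del curso):
```{r, eval=FALSE}
params:
  pais: COL
  anio: 2015
```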
---
<div class="my-header"></div>
## Parametrizando reportes
```{r, eval=FALSE}
library(tidyverse)
decisiones <- read_csv("data/decisiones_asilo_peru.csv")
decisiones_filtrado <- decisiones %>%
filter(`Codigo Pais Origen` == params$pais)
decisiones_filtrado %>%
ggplot(aes(Anio, Reconocidas)) +
geom_line(color = "red", size = 1.5)
```
---
<div class="my-header"></div>
## Automatizando reportes
Posibilidades: por ejemplo, generar varios reportes en serie llamando a `rmarkdown::render()` con distintos valores del parámetro
```{r, eval = FALSE}
for (pais_actual in c("HTI", "VEN", "COL")) {
rmarkdown::render("reporte_final.Rmd",
output_file = paste0("reporte-",
pais_actual,
".docx"),
params = list(pais = pais_actual))
}
```
¡Este código crea 3 reportes, uno por país!
---
<div class="my-header"></div>
## Control de chunks
Hay una serie de opciones que controlan si el código se ejecuta y si el resultado del código va a quedar en el reporte o no:
- `eval = FALSE` evita que se corra el código del chunk, de manera que tampoco va a mostrar resultados. Es útil para mostrar códigos de ejemplo si estás escribiendo, por ejemplo un documento para enseñar R.
- `echo = FALSE` corre el código del chunk y muestra los resultados, pero oculta el código en el reporte. Esto es útil para escribir reportes para personas que no necesitan ver el código de R que generó el gráfico o tabla.
- `include = FALSE` corre el código pero oculta tanto el código como los resultados. Es útil para usar en chunks de configuración general donde cargas las librerías.
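Estas opciones se escriben dentro del encabezado del chunk, separadas por comas. Un esquema ilustrativo, escrito como comentarios para que no se ejecute nada (el nombre del chunk es hipotético):
```{r, eval=FALSE}
# encabezado de un chunk de ejemplo:
#   {r grafico-final, echo=FALSE, message=FALSE}
```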
---
<div class="my-header"></div>
## Control de chunks
- Si estás escribiendo un informe en el que no querés que se muestre ningún código, agregarle echo = FALSE a cada chunk nuevo se vuelve tedioso.
- Solución: cambiar la opción de forma global, de manera que aplique a todos los chunks. Esto se hace mediante la función `knitr::opts_chunk$set()`, que define las opciones globales de los chunks que le siguen. (Ya está agregada en el primer bloque del documento reporte_final.Rmd)
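Un boceto ilustrativo de ese chunk de configuración global:
```{r, eval=FALSE}
knitr::opts_chunk$set(echo = FALSE, message = FALSE, warning = FALSE)
```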
---
<div class="my-header"></div>
## Control de chunks
- Vimos que a veces algunas funciones imprimen mensajes sobre lo que hacen. Por ejemplo, cuando `read_csv` lee un archivo, describe el tipo de dato de cada columna
- Esto es útil, pero en general no queremos que quede en el reporte. Para que no muestre estos mensajes basta con poner la opción `message = FALSE`
- Si queremos evitar una advertencia (warning) podemos indicar también `warning = FALSE`, separado por coma de las demás opciones del chunk
---
<div class="my-header"></div>
## Tu turno
En el archivo reporte_final.Rmd elige las opciones adecuadas para cada chunk
---
<div class="my-header"></div>
## Mejores reportes
- Mejores gráficos
- Mejores tablas
---
<div class="my-header"></div>
## Temas en `ggplot2`
- [https://ggplot2.tidyverse.org/reference/ggtheme.html](https://ggplot2.tidyverse.org/reference/ggtheme.html)
- [https://mran.microsoft.com/snapshot/2017-02-04/web/packages/ggthemes/vignettes/ggthemes.html](https://mran.microsoft.com/snapshot/2017-02-04/web/packages/ggthemes/vignettes/ggthemes.html)
---
<div class="my-header"></div>
## Temas en `ggplot2`
```{r minimal}
decisiones_filtrado %>%
ggplot(aes(Anio, Reconocidas)) +
geom_line(color = "red", size = 1.5) +
theme_minimal()
```
---
<div class="my-header"></div>
## Temas en `ggplot2`
```{r ggthemes }
# install.packages("ggthemes")
library(ggthemes)
decisiones_filtrado %>%
ggplot(aes(Anio, Reconocidas)) +
geom_line(color = "red", size = 1.5) +
theme_fivethirtyeight()
```
---
<div class="my-header"></div>
## Tablas simples con kable
```{r}
library(knitr)
kable(decisiones)
```
---
<div class="my-header"></div>
## Tablas lindas con `kableExtra`
El paquete `kableExtra`, como su nombre lo indica, nació para extender el poder de la función kable.
Descárgalo y prueba sus funcionalidades.
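Un boceto ilustrativo de su uso (asumiendo la tabla `decisiones` ya cargada):
```{r, eval=FALSE}
# install.packages("kableExtra")
library(kableExtra)
kable(decisiones) %>%
  kable_styling(bootstrap_options = c("striped", "hover"), font_size = 12)
```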
---
<div class="my-header"></div>
## Licencia y material usado
Licencia: [Creative Commons Attribution-ShareAlike 4.0 International License](https://creativecommons.org/licenses/by-sa/4.0/deed.es_ES).
Este material está inspirado y utiliza explicaciones de:
- [R para Clima](https://eliocamp.github.io/r-clima/) de <NAME> y Elio Campitelli
- [Master the Tidyverse](https://github.com/rstudio-education/master-the-tidyverse-instructors) de <NAME><file_sep>---
title: "04-EJ-visualizacion"
output: html_document
---
Trabajaremos en principio con los dos datasets conocidos hasta ahora: Haiti y decisiones
```{r include=FALSE, message=FALSE}
library(tidyverse)
library(readxl)
url_haiti <- "https://data.humdata.org/dataset/27baf423-2d13-49a4-bd72-cb65d8f924da/resource/9f71ab56-8a9a-4df2-be5d-f26d3600a285/download/dtm-haiti-site-assessment-round-33.xlsx"
destfile <- "dtm_haiti_site_assessment_round_33.xlsx"
curl::curl_download(url_haiti, destfile)
haiti <- read_excel(destfile,
col_types = c("text", "text", "text",
"numeric", "numeric", "numeric",
"numeric", "numeric", "numeric",
"text", "text", "text", "text", "text",
"text", "text", "text", "text"))
haiti <- slice(haiti, -1)
haiti
```
```{r, include = FALSE, message=FALSE}
decisiones <- read_csv("data/decisiones_asilo_peru.csv")
decisiones
```
## Tu turno 1: Mi primer gráfico
Copia y corre el código para generar el gráfico que queremos
```{r}
ggplot(data = haiti) +
geom_point(mapping = aes(x = Households, y = Individuals))
```
## Tu turno 2
Reproducir este histograma de la variable Individuals con la ayuda de la guía rápida de `ggplot2`. Usa `geom_histogram`
Ayuda: no utilizar la variable `y`
```{r}
ggplot(data = haiti) +
geom_histogram(mapping = aes(Individuals))
```
## Tu turno 3
Reproducir este gráfico de barras que grafica la cantidad de campamentos en cada comuna. Utiliza la ayuda de la guía rápida de `ggplot2`. Usa `geom_bar`
Ayuda: no utilizar la variable `y`
```{r}
ggplot(data = haiti) +
geom_bar(mapping = aes(x = Commune, fill = Commune))
```
## Tu turno 4
Predice la salida del siguiente código
Luego, córrelo
```{r, eval = FALSE}
ggplot(data = haiti) +
geom_point(mapping = aes(x = Households, y = Individuals)) +
geom_smooth(mapping = aes(x = Households, y = Individuals))
```
## Tu turno 5
Salva tu último gráfico en la carpeta img de tu proyecto
Si por algún motivo no estás en un proyecto, vuelve al proyecto del curso
```{r}
ggsave("img/my-plot.pdf")
```
## DESAFIO 2 para practicar despues
1. Importa el dataset `dtm3-peru` (cuidado con la primera fila de la tabla, sáltala)
https://data.humdata.org/dataset/dad01c41-adde-46ee-998d-a6baf9eeb8d1/resource/538975d3-f7bb-4f0d-9111-58f7e00ee4f1/download/dtm3_version_publica_1209-1.xlsx
2. ¿Cuántas personas respondieron la encuesta?
3. Haz un gráfico que represente la distribución del "Número de Familias que viven en la misma carpa o techo"
4. Representa con la ayuda de la guía rápida un gráfico de barras lado a lado del "Número de Familias que viven en la misma carpa o techo" que discrimine según distrito
```{r}
library(readxl)
url <- "https://data.humdata.org/dataset/dad01c41-adde-46ee-998d-a6baf9eeb8d1/resource/538975d3-f7bb-4f0d-9111-58f7e00ee4f1/download/dtm3_version_publica_1209-1.xlsx"
destfile <- "dtm3_version_publica_1209_1.xlsx"
curl::curl_download(url, destfile)
dtm3_version_publica_1209_1 <- read_excel(destfile,
col_types = c("text", "text", "text",
"text", "text", "date", "text", "text",
"text", "text", "text", "text", "numeric",
"text", "text", "text", "text", "text",
"text", "text", "text", "text", "text",
"text", "text", "text", "text", "text",
"text", "text", "text", "text", "text",
"text", "text", "text", "text", "text",
"text", "text", "text", "text", "text",
"text", "text", "text", "text", "text",
"text", "text", "text", "text", "text",
"text", "text", "text", "text", "text",
"text", "text", "text", "text", "text",
"numeric", "numeric", "numeric",
"numeric", "numeric", "numeric",
"numeric", "numeric", "text", "text",
"text", "text", "text", "text", "text",
"text", "text", "text", "text", "text",
"text", "numeric", "numeric", "numeric",
"numeric", "numeric", "numeric",
"numeric", "numeric", "numeric",
"text", "numeric", "numeric", "numeric",
"numeric", "numeric", "numeric",
"numeric", "numeric", "numeric",
"numeric", "numeric", "text", "text",
"numeric", "numeric", "numeric",
"numeric", "numeric", "text", "text",
"numeric", "numeric", "numeric",
"numeric", "numeric", "numeric",
"numeric", "numeric", "numeric",
"numeric", "text", "text", "numeric",
"numeric", "numeric", "numeric",
"numeric", "numeric", "numeric",
"numeric", "numeric", "numeric",
"text", "text", "numeric", "numeric",
"numeric", "numeric", "numeric",
"numeric", "text", "text", "numeric",
"numeric", "numeric", "numeric",
"numeric", "numeric", "numeric",
"numeric", "numeric", "text", "text",
"text", "text", "numeric", "text",
"text", "text"), skip = 1)
View(dtm3_version_publica_1209_1)
# 2. cantidad de filas
nrow(dtm3_version_publica_1209_1)
# 3.
ggplot(dtm3_version_publica_1209_1) +
geom_bar(aes(x = `Número de Familias que viven en la misma carpa o techo`))
# 4.
ggplot(dtm3_version_publica_1209_1) +
geom_bar(aes(x = `Número de Familias que viven en la misma carpa o techo`)) +
facet_wrap(~Distrito)
```
<file_sep>---
title: "Interaccion con Kobo y Power BI"
date: "`r Sys.Date()`"
output:
xaringan::moon_reader:
css: ["default", "default-fonts", "custom.css"]
mathjax: "https://cdn.bootcss.com/mathjax/2.7.1/MathJax.js?config=TeX-MML-AM_HTMLorMML"
lib_dir: libs
seal: false
nature:
ratio: 13:9
highlightStyle: github
highlightLines: true
countIncrementalSlides: false
---
```{r setup, include=FALSE}
options(htmltools.dir.version = FALSE)
knitr::opts_chunk$set(fig.height=5)
library(ggplot2)
library(readr)
library(here)
library(dplyr)
library(colorspace)
paleta <- rainbow_hcl(10)
print_it_rainbow <- function(palabra, paleta) {
i_font = 1
for (char in palabra) {
if (char != " ") {
cat(paste0("<font style='color:", paleta[i_font] , "'>",char,"</font>"))
i_font = i_font %% length(paleta) + 1
} else {
cat(" ")
}
}
}
```
background-color: #272822
<br>
<br>
<br>
<br>
<br>
<br>
```{r, results='asis', echo=FALSE}
cat("<h1 style='color:white'> <center> R para contextos humanitarios de emergencia")
cat("</center></h1> \n")
cat("## <center>")
palabra = unlist(strsplit("Interacción con Kobo y Power BI", split = ""))
cat(print_it_rainbow(palabra, paleta))
cat("</center> \n")
cat("### <center>")
fecha = as.character(Sys.Date())
palabra = unlist(strsplit("<NAME>", split = ""))
cat(print_it_rainbow(palabra, paleta))
cat("</center>")
```
---
<div class="my-header"></div>
## Kobo
Supongamos que tenemos que realizar muchas veces por semana un reporte con datos provenientes de un relevamiento recogido en Kobo Toolbox.
Tenemos muchas formas de hacerlo, pero lo más cómodo será con un único código que podamos ejecutar y que nos genere el reporte automáticamente.
Podemos hacer esto con R a través de la API (Application Programming Interface) de Kobo Toolbox.
---
<div class="my-header"></div>
## Kobo
Tenemos que generar en R una URL que le indique a Kobo lo que necesitamos
```{r, eval=FALSE}
install.packages(c("httr", "jsonlite"))
library(httr)
library(jsonlite)
```
```{r, eval=FALSE}
# A completar con tus datos
usuario = "tallerrkobo"
contrasenia = "TallerRKobo123"
form_id = "559546"
# puedes obtener el numero de tu formulario aqui :
# https://im.unhcr.org/kobosupport/#form
server = "kc.kobotoolbox.org"
# o "kc.humanitarianresponse.info" o "kc.kobotoolbox.org"
# Generar datos
URL <- paste0("https://", server, "/api/v1/data/", form_id, ".csv")
res <- GET(URL, authenticate(usuario, contrasenia), progress())
respuestas <- read_csv(res$content)
```
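Como variante (boceto ilustrativo, asumiendo que el mismo endpoint acepta el formato `.json`), pueden pedirse los datos en JSON y convertirlos con `jsonlite`, que instalamos arriba:
```{r, eval=FALSE}
URL_json <- paste0("https://", server, "/api/v1/data/", form_id, ".json")
res_json <- GET(URL_json, authenticate(usuario, contrasenia), progress())
respuestas_json <- fromJSON(content(res_json, "text"))
```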
---
<div class="my-header"></div>
## Kobo: subir formulario
Existen otras funcionalidades de Kobo
- Desarrollar formularios en R y actualizarlos desde R
- El formato en este caso es xls
```{r, eval = FALSE}
library(devtools)
library(httr)
# correr este codigo para cargar funciones, escribirlo todo en 1 linea:
source_url("https://raw.githubusercontent.com/
ppsapkota/kobohr_apitoolbox/master/
R/r_func_ps_kobo_utils.R")
#"kf.kobotoolbox.org" o "kobo.humanitarianresponse.info"
kpi_url <- "https://kf.kobotoolbox.org/imports/"
kobo_form_xlsx <- "test-formulario.xlsx" #tu formulario
# la siguiente funcion crea un borrador en tu usuario
# con las preguntas indicadas en el xlsx
d_content<-kobohr_kpi_upload_xlsform(kpi_url,
kobo_form_xlsx,
"tallerrkobo",
"TallerRKobo123")
import_url <- d_content$url
```
---
<div class="my-header"></div>
## Kobo: subir formulario
```{r, eval = FALSE}
d_content <- kobohr_kpi_get_asset_uid(import_url, #del paso anterior
"tallerrkobo",
"TallerRKobo123")
asset_uid <- d_content$messages$created$uid
# codigo del formulario que aparecera en borradores en KoboToolbox
```
<center><img src="img/kobotoolbox.png" alt="Hex stickers" height="350"></center>
---
<div class="my-header"></div>
## Power BI
Hay varios tipos de interacción posibles entre R y Power BI. Vamos a ver dos de ellas:
- Carga de datos a través de R
- Generación de gráficos con scripts de R
También puede usarse R para hacer queries de consulta y para importar ciertos datos desde el Market
---
<div class="my-header"></div>
## Power BI - Importar datos
1)
<center><img src="img/PowerBI1.png" alt="Hex stickers" height="450"></center>
---
<div class="my-header"></div>
## Power BI - Importar datos
2)
<center><img src="img/PowerBI2.png" alt="Hex stickers" height="450"></center>
---
<div class="my-header"></div>
## Power BI - Importar datos
3)
<center><img src="img/PowerBI3.png" alt="Hex stickers" height="450"></center>
---
<div class="my-header"></div>
## Power BI - Importar datos
4)
<center><img src="img/PowerBI4.png" alt="Hex stickers" height="450"></center>
---
<div class="my-header"></div>
## Power BI - Gráficos de R en Power BI
Generar un gráfico de R en Power BI
<center><img src="img/PowerBI5.png" alt="Hex stickers" height="450"></center>
---
<div class="my-header"></div>
## Power BI - Gráficos de R en Power BI
Puedo agregar sliders para interactuar y filtrar el gráfico
Los gráficos generados en R se actualizarán automáticamente
<center><img src="img/PowerBI6.png" height="400"></center>
---
<div class="my-header"></div>
## Resumen
¡Para tener un abanico mayor de opciones, conecta R con otras herramientas!
<center><img src="img/starwars-teamwork.png" alt="Hex stickers" height="450"></center>
<p style="color: gray; font-size:15px"> Ilustración de <NAME>
</p>
---
<div class="my-header"></div>
## Licencia y material usado
Licencia: [Creative Commons Attribution-ShareAlike 4.0 International License](https://creativecommons.org/licenses/by-sa/4.0/deed.es_ES).
<file_sep>---
title: "Manipulacion II"
date: "`r Sys.Date()`"
output:
xaringan::moon_reader:
css: ["default", "default-fonts", "custom.css"]
mathjax: "https://cdn.bootcss.com/mathjax/2.7.1/MathJax.js?config=TeX-MML-AM_HTMLorMML"
lib_dir: libs
seal: false
nature:
ratio: 13:9
highlightStyle: github
highlightLines: true
countIncrementalSlides: false
---
```{r setup, include=FALSE}
options(htmltools.dir.version = FALSE)
knitr::opts_chunk$set(fig.height=5)
library(kableExtra)
library(ggplot2)
library(readr)
library(here)
library(dplyr)
library(colorspace)
paleta <- rainbow_hcl(10)
print_it_rainbow <- function(palabra, paleta) {
i_font = 1
for (char in palabra) {
if (char != " ") {
cat(paste0("<font style='color:", paleta[i_font] , "'>",char,"</font>"))
i_font = i_font %% length(paleta) + 1
} else {
cat(" ")
}
}
}
```
background-color: #272822
<br>
<br>
<br>
<br>
<br>
<br>
```{r, results='asis', echo=FALSE}
cat("<h1 style='color:white'> <center> R para contextos humanitarios de emergencia")
cat("</center></h1> \n")
cat("## <center>")
palabra = unlist(strsplit("Manipulación de datos II", split = ""))
cat(print_it_rainbow(palabra, paleta))
cat("</center> \n")
cat("### <center>")
fecha = as.character(Sys.Date())
palabra = unlist(strsplit("<NAME>", split = ""))
cat(print_it_rainbow(palabra, paleta))
cat("</center>")
```
---
background-color: #272822
<br>
<br>
<br>
<br>
<br>
<br>
```{r, results='asis', echo=FALSE}
cat("<h1 style='color:white'> <center> Abrir el archivo 05-EJ-manipulacionII.Rmd")
cat("</center></h1> \n")
cat("## <center>")
palabra = unlist(strsplit("Para ir haciendo los EJ", split = ""))
cat(print_it_rainbow(palabra, paleta))
cat("</center> \n")
```
---
<div class="my-header"></div>
## Datos ordenados
El entorno tidyverse está basado en el concepto de datos ordenados
Un dataset está ordenado si:
- Cada variable está en una columna
- Cada observación está en una fila
- Cada valor se encuentra en una celda
<center><img src="img/tidy.png" height="300"></center>
---
<div class="my-header"></div>
## Transformando a tidy
Veamos la siguiente tabla con la cantidad total de pedidos de asilo recibidos. Las variables involucradas son país, año y cantidad de casos. ¿Está ordenada la tabla?
```{r, echo = FALSE, message=FALSE}
library(tidyverse)
pedidos_asilo <- tribble(
~Pais, ~"2011", ~"2012", ~"2013",
"ARG", 7000, 6900, 7000,
"PER", 5800, 6000, 6200,
"TUR", 15000, 14000, 13000
)
contaminacion <- tribble(
~ciudad, ~tamanio_particula, ~cantidad,
"Cusco", "p grande", 23,
"Cusco", "p pequenia", 14,
"London", "p grande", 22,
"London", "p pequenia", 16,
"Beijing", "p grande", 121,
"Beijing", "p pequenia", 121
)
knitr::kable(pedidos_asilo, "html")
```
Me gustaría que estuviera así:
```{r, echo = FALSE}
pedidos_asilo_tidy <- pivot_longer(pedidos_asilo,cols = c(2,3,4),names_to = "Anio", values_to = "cantidad")
knitr::kable(pedidos_asilo_tidy)
```
---
<div class="my-header"></div>
## Transformando a tidy: `pivot_longer`
¡La función `pivot_longer` se encarga de esto!
Pasa de una tabla ancha a una tabla más larga.
Hay que identificar cuáles son las columnas que dejarán de serlo: sus nombres pasarán a ser los valores de una nueva columna. Estas columnas serán la llave de la transformación
.pull-left[
```{r, echo=FALSE}
kbl(pedidos_asilo)
```
]
.pull-right[
```{r, echo=FALSE}
kbl(pedidos_asilo_tidy) %>%
column_spec(2, background = "yellow")
```
]
---
<div class="my-header"></div>
## Transformando a tidy: `pivot_longer`
```{r}
pedidos_asilo_tidy <- pivot_longer(pedidos_asilo,
cols = c(2, 3, 4),
names_to = "Anio",
values_to = "cantidad")
```
- data corresponde a la tabla a modificar
- cols corresponde a las **columnas a ser pivoteadas**
- names_to corresponde al **nombre** que agrupará a las columnas de interés
- values_to corresponde al **nombre** de la variable que agrupará a los valores
```{r, echo=FALSE}
kbl(pedidos_asilo_tidy) %>%
column_spec(2, background = "yellow")
```
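Las columnas a pivotear también pueden indicarse por nombre o por exclusión; este boceto ilustrativo es equivalente al anterior:
```{r, eval=FALSE}
pivot_longer(pedidos_asilo,
             cols = -Pais,
             names_to = "Anio",
             values_to = "cantidad")
```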
---
<div class="my-header"></div>
## `pivot_longer`: idea del funcionamiento
desde el **formato ancho** al **formato largo**
<center><img src="img/ancho-a-largo.png" height="450"></center>
<p style="color: gray; font-size:15px"> Imagen del curso "R para Clima" - <NAME> y <NAME> <br>
https://eliocamp.github.io/r-clima/index.html
</p>
---
<div class="my-header"></div>
## Transformando a tidy: `pivot_wider`
Por otro lado, tenemos
```{r, echo = FALSE}
knitr::kable(contaminacion)
```
---
<div class="my-header"></div>
## Transformando a tidy: `pivot_wider`
Que tampoco está ordenada, pero en el sentido contrario (¡es larga en este caso!). Me gustaría que estuviera así:
```{r, echo = FALSE}
contaminacion_tidy <- pivot_wider(contaminacion,names_from = tamanio_particula,values_from = cantidad)
knitr::kable(contaminacion_tidy)
```
Debo identificar las dos columnas involucradas. En este caso, `tamanio_particula` contiene los nombres de las futuras columnas (`names_from`) y `cantidad` contiene los valores correspondientes (`values_from`)
```{r}
contaminacion_tidy <- pivot_wider(contaminacion,
names_from = tamanio_particula,
values_from = cantidad)
```
me devuelve el resultado deseado
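Si faltara alguna combinación ciudad/tamaño, `pivot_wider` permite indicar un valor de relleno con el argumento `values_fill` (boceto ilustrativo):
```{r, eval=FALSE}
pivot_wider(contaminacion,
            names_from = tamanio_particula,
            values_from = cantidad,
            values_fill = 0)
```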
---
<div class="my-header"></div>
## Tu turno 1: `pivot_wider`
Convertir `paises_largo` al formato ordenado (variables como columnas)
```{r, message = FALSE}
paises_largo <- read_csv("data/paises_largo.csv")
knitr::kable(paises_largo[1:6,])
```
---
<div class="my-header"></div>
## Combinando tablas de datos!
Hasta ahora, todo lo que usamos de `dplyr` involucra trabajar con una sola tabla a la vez y modificarla.
¿Qué pasa si la información está repartida en más de una tabla? En ese caso, tenemos que unir estas tablas a partir de una o más variables en común o *keys*.
**En Excel**:
“VLOOKUP” o “BUSCARV”
**En R:**
familia de funciones *_join().
Hay una función para cada tipo de unión que queramos hacer.
---
<div class="my-header"></div>
## familia `join`
<center><img src="img/join.png" height="180"></center>
- `full_join():` devuelve todas las filas y todas las columnas de ambas tablas `x` e `y`. Cuando no coinciden los elementos, devuelve `NA` (dato faltante). Esto significa que no se pierden filas de ninguna de las dos tablas aún cuando no hay coincidencia.
- `left_join():` devuelve todas las filas de `x` y todas las columnas de `x` e `y`. Las filas en `x` que no tengan coincidencia con `y` tendrán `NA` en las nuevas columnas. Si hay múltiples coincidencias entre `x` e `y`, devuelve todas las coincidencias posibles.
- `inner_join():` devuelve todas las filas de `x` donde hay coincidencias con `y` y todas las columnas de `x` e `y`. Si hay múltiples coincidencias entre `x` e `y`, entonces devuelve todas las coincidencias. Eliminará las filas (observaciones) que no coincidan en ambas tablas.
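Un boceto ilustrativo con dos tablas hipotéticas unidas por la columna `pais`:
```{r, eval=FALSE}
tabla_x <- tibble(pais = c("ARG", "PER"), poblacion = c(45, 33))
tabla_y <- tibble(pais = c("PER", "TUR"), casos = c(6000, 14000))
full_join(tabla_x, tabla_y, by = "pais")  # conserva ARG, PER y TUR
left_join(tabla_x, tabla_y, by = "pais")  # conserva solo ARG y PER
```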
---
<div class="my-header"></div>
## full_join
<center><img src="img/join_family.png" height="450"></center>
<p style="color: gray; font-size:15px"> Imagen del curso "R para Clima" - <NAME> y <NAME> <br>
https://eliocamp.github.io/r-clima/index.html
</p>
---
<div class="my-header"></div>
## Tu turno 2: para practicar después
Unir la tabla de países obtenida en Tu turno 1 con la tabla de decisiones de asilo (decisiones_asilo_peru). Utiliza como llave a las columnas con el nombre del país y el año.
---
<div class="my-header"></div>
## Licencia y material usado
Licencia: [Creative Commons Attribution-ShareAlike 4.0 International License](https://creativecommons.org/licenses/by-sa/4.0/deed.es_ES).
Este material está inspirado y utiliza explicaciones de:
- [R para Clima](https://eliocamp.github.io/r-clima/) de <NAME> y <NAME>itelli
- [Master the Tidyverse](https://github.com/rstudio-education/master-the-tidyverse-instructors) de <NAME>
<file_sep>---
title: "R y RStudio"
date: "`r Sys.Date()`"
output:
xaringan::moon_reader:
mathjax: "https://cdn.bootcss.com/mathjax/2.7.1/MathJax.js?config=TeX-MML-AM_HTMLorMML"
lib_dir: libs
seal: false
nature:
highlightStyle: github
highlightLines: true
countIncrementalSlides: false
---
```{r setup, include=FALSE}
options(htmltools.dir.version = FALSE)
#pagedown::chrome_print("C:\\Users\\violeta\\Documents\\AdvStats\\EM_algo.html")
knitr::opts_chunk$set(fig.height=5)
library(ggplot2)
library(readr)
library(here)
library(dplyr)
library(colorspace)
paleta <- rainbow_hcl(10)
print_it_rainbow <- function(palabra, paleta) {
i_font = 1
for (char in palabra) {
if (char != " ") {
cat(paste0("<font style='color:", paleta[i_font] , "'>",char,"</font>"))
i_font = i_font %% length(paleta) + 1
} else {
cat(" ")
}
}
}
```
background-color: #272822
<br>
<br>
<br>
<br>
<br>
<br>
```{r, results='asis', echo=FALSE}
cat("# <center>")
palabra = unlist(strsplit("R y RStudio", split = ""))
cat(print_it_rainbow(palabra, paleta))
cat("</center> \n")
cat("## <center>")
palabra = unlist(strsplit("Descarga e instalación", split = ""))
cat(print_it_rainbow(palabra, paleta))
cat("</center> \n")
cat("### <center>")
fecha = as.character(Sys.Date())
palabra = unlist(strsplit("18-11-2020", split = ""))
cat(print_it_rainbow(palabra, paleta))
cat("</center>")
```
---
# Objetivo
- Instalar el lenguaje de programación R
- Instalar RStudio (una interfaz gráfica para usar R más cómodamente)
Sistema operativo: Windows 10
# Tareas
- Descargar R
- Instalar R
- Descargar RStudio
- Instalar RStudio
---
# Descargar R
## https://cran.r-project.org/bin/windows/base/
---
background-color: #272822
<br>
<br>
<br>
<br>
<br>
<br>
```{r, results='asis', echo=FALSE}
cat("# <center>")
palabra = unlist(strsplit("Instalar R", split = ""))
cat(print_it_rainbow(palabra, paleta))
cat("</center> \n")
```
---
# Descargar RStudio
### https://rstudio.com/products/rstudio/download/#download
---
background-color: #272822
<br>
<br>
<br>
<br>
<br>
<br>
```{r, results='asis', echo=FALSE}
cat("# <center>")
palabra = unlist(strsplit("Instalar RStudio", split = ""))
cat(print_it_rainbow(palabra, paleta))
cat("</center> \n")
```
---
background-color: #272822
<br>
<br>
<br>
<br>
<br>
<br>
```{r, results='asis', echo=FALSE}
cat("# <center>")
palabra = unlist(strsplit("Listo!", split = ""))
cat(print_it_rainbow(palabra, paleta))
cat("</center> \n")
```
<file_sep>---
title: "Reporte Final"
output:
word_document:
toc: TRUE
html_document:
toc: TRUE
params:
pais: HTI
---
```{r setup, include=FALSE}
knitr::opts_chunk$set(echo = FALSE)
```
## Importo datos
En esta sección explicamos nuestros datos
```{r importar, echo=FALSE, message=FALSE, warning=FALSE}
library(tidyverse)
decisiones <- read_csv("data/decisiones_asilo_peru.csv")
```
```{r message=FALSE, warning=FALSE}
library(knitr)
kable(decisiones[1:6, 1:3])
```
## Filtro
En esta sección explicamos el proceso de manipulación
```{r}
decisiones_filtrado <- decisiones %>%
filter(`Codigo Pais Origen` == params$pais)
```
## Grafico
Por último, aquí hacemos nuestro gráfico
```{r}
decisiones_filtrado %>%
ggplot(aes(Anio, Reconocidas)) +
geom_line(color = "red", size = 1.5) +
labs(title = "Pedidos de asilo reconocidos por Peru")
```
<file_sep>---
title: "Capacitación en R"
author: "<NAME>"
date: "25/11/2019"
output:
ioslides_presentation:
logo: assets/img/lama_gtrm.png
css: test.css
transition: slower
widescreen: true
smaller: true
---
```{r setup, include=FALSE}
knitr::opts_chunk$set(echo = FALSE)
```
## Bienvenidos
<br>
**Bienvenidos a nuestra capacitación en R para manejo de información!**
<br>
<br>
Vamos a pasar este día aprendiendo sobre la herramienta R y todas sus funcionalidades para nuestro trabajo :)
<br>
<br>
-- Equipo GTRM Perú
## ¿Qué es R?
<br>
1. **Lenguaje de programación** con enfoque al análisis estadístico
<br>
2. Uno de los **lenguajes estadísticos** más utilizados en investigación científica
<br>
3. Una de las herramientas más importantes para **data science**
## Organizaciones que utilizan R
{ width=20% } { width=20% }
{ width=20% } { width=5% }
{ width=5% } { width=20% }
## R en los sectores humanitarios y de desarrollo
**Taller en Amman (5-6 Noviembre, 2018)** { width=50% }
**Visión para el futuro:**
R, como lenguaje y a través de las herramientas/paquetes que lo constituyen, es la opción de preferencia de nuestra comunidad porque permite:
- Cerrar las brechas con buenas prácticas desde la industria de data science y la academia;
- Minimizar las dependencias de un único proveedor, licencia u organización;
- Construir y expandir los conocimientos entre organizaciones diferentes.
<font size="2">Para más información: https://humanitarian-user-group.github.io/post/first-meeting/</font>
## ¿Por qué utilizar R en lugar de Excel?
<br>
- **Para los estadísticos:** Fuentes de datos demasiado grandes para Excel, "Big Data"
<br>
- **Más común en IM:** Ganar tiempo haciendo tareas repetitivas
<br>
- **Para ambos:** Asegurar trabajo reproducible
<br>
Y también: 100% gratuito y open source!
## Desde "clic" hacia "script" (1)
Un flujo de trabajo tradicional es el siguiente:
<br>
1. Ingresar en Kobo, ir al formulario, descargar los datos; { width=7% }
<br>
2. Abrir el archivo en Excel, transformar datos manualmente (copiar, pegar, tablas dinámicas, etc) y preparar gráficos { width=5% }
3. Exportar datos para preparar mapas en GIS;{ width=7% }
4. Abrir Word para añadir el narrativo, copiar y pegar gráficos/mapas; { width=6% }
5. O: Juntar todo en un documento con Adobe Illustrator/InDesign. { width=5% }
<br>
## Desde "clic" hacia "script" (2)
Este flujo de trabajo **no** es lo más efectivo, sobre todo si el dataset utilizado cambia continuamente (por ejemplo, con una encuesta Kobo):
- Los datos son manipulados haciendo clic manualmente;
- Los datos son exportados desde una herramienta hacia otra (Excel, GIS, Word), utilizando varios formatos;
- Los resultados (gráficos, tablas) son copiados y pegados **manualmente** en el formato de publicación final.
**¡Este proceso es laborioso y deja un gran potencial para errores!**
{ width=20% }
## Desde "clic" hacia "script" (3)
Unas preguntas y problemas típicos revisando un análisis:
- ¿Qué cálculos se hicieron para llegar a las cifras?
- ¿Tomaron en consideración x, y, z para el análisis?
- ¿Se identificaron outliers?
- ¡Ups! ¡Hay un error / queremos hacer una revisión en los datos! ¿Podemos repetir el análisis? ¡Necesitamos las cifras, gráficos y tablas para el informe / presentación en media hora!
- Como co-autor o lector de un informe, quiero ver el proceso de investigación (como llegamos a una conclusión) y no solo el producto final con cifras/tablas agregadas.
## Desde "clic" hacia "script" (4)
Si todos los pasos de manejo de datos, análisis y visualización se hacen a través de una serie de comandos **escritos en un script**, en lugar de **docenas de clics**:
- Una vez identificado un error en los datos, o al utilizar diferentes datasets, solo se necesita hacer cambios en el script y el informe se actualizará automáticamente;
- El manejo de datos queda *de facto* completamente documentado (no se necesitan cambios manuales en Excel);
- El análisis es documentado y listo para una revisión colaborativa;
<br>
<br>
<br>
<br>
<br>
<br>
<font size="2">Estas diapositivas son adaptadas desde la presentación de Edouard Legoupil: http://edouard-legoupil.github.io/humanitaRian-data-science/</font>
# Primeros pasos en R
- La interfaz visual de RStudio
- Los paquetes de R
- Elementos básicos de R (paquete Swirl)
- Primeros pasos
- Vectores
- Matrices y marcos de datos
## Empezamos!
Una distinción importante:
<br>
- **R** es el lenguaje de programación
- Instalar **R**: https://cran.r-project.org/bin/windows/base/
<br>
- **RStudio** es la interfaz para utilizar R
- Instalar **RStudio**: https://www.rstudio.com/products/rstudio/download/
## Interfaz visual de RStudio
{ width=100% }
## Los paquetes de R
<br>
- R tiene una variedad enorme de usos: ¡si los incluyera todos, sería demasiado pesado!
<br>
- La versión base de R tiene varios comandos básicos;
<br>
- Los paquetes son **extensiones** de las funcionalidades básicas de R para usos específicos;
<br>
- Paquetes oficiales están en la plataforma CRAN;
<br>
- Paquetes nuevos/especificos se pueden también descargar desde otras fuentes.
## Elementos básicos de R: Ejercicios individuales
En la Console, insertar los comandos siguientes (presionar ENTER después de cada comando):
<br>
```{r, eval=FALSE}
install.packages("swirl")
library("swirl")
select_language("spanish")
swirl()
```
Después, seleccionar "1: R Programming" y vamos a seguir los módulos 1, 4 y 7.
## Elementos básicos de R
- Cómo interactuar con R
- Cálculos: 5 + 7
- Operatores: +, -, *, /, ^, sqrt()
- Crear variable: x <- 5+7
- Vectores: c(1, 2, 3)
- Buscar ayuda: ?función
## Vectores
- Tipos de vectores:
- Numérico: 1, 2.5, 1000
- Lógico: TRUE, FALSE
- Carácter: "hola"
- Otros
- Operadores lógicos: <, >, <=, >=, ==, !=, |, &
- Función paste()
## Matrices
- Datos "rectangulares": Como hojas de cálculo Excel
<br>
- Matrix: Mismo tipos de vectores
- Marco de datos: Diversos tipos de vectores (más común para nosotros!)
<br>
- Dimensiones: dim(x) Como las filas y columnas
- Largo: length(x)
- Tipo: class(x)
- Combinar columnas: cbind()
- Combinar filas: rbind()
- Nuevas matrices: matrix()
- Nuevos marcos de datos: data.frame()
- Cambiar nombre a columnas: colnames()
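Un ejemplo ilustrativo de estas funciones (los valores son hipotéticos):
```{r, eval=FALSE}
nombres <- c("Ana", "Luis", "Marta")
edades <- c(25, 31, 40)
personas <- data.frame(nombres, edades)
dim(personas)       # 3 filas, 2 columnas
class(personas)     # "data.frame"
colnames(personas) <- c("nombre", "edad")
```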
| b50b9f34805090b7d8db81880490329b7d76a51e | [
"Markdown",
"R",
"RMarkdown"
] | 21 | RMarkdown | IM-Peru/Cursos_R | 2b5a0b765f3a98c80db406e7113cd8a32f24d211 | 86f738c6e15ee16b56973c04466b1795f0896913 | |
refs/heads/master | <file_sep>var app= angular.module("myShoppingList",[]);
app.controller("myCtrl",function($scope){
$scope.products=["Milk","Bread","Cheese"];
$scope.addItem=function(){
$scope.errortext="";
if(!$scope.addMe){return;}
if($scope.products.indexOf($scope.addMe)== -1){
$scope.products.push($scope.addMe);
}else{
$scope.errortext="The item is already in your shopping list.";
}
};
$scope.removeItem= function(x){
$scope.errortext="";
$scope.products.splice(x,1);
}
}); | 0910cd096f81463e59811f69007046246fa3aa02 | [
"JavaScript"
] | 1 | JavaScript | alexwillchill/info3180-lab6 | d4c93fca98bf4c6d9551b614c0284ce8c6b83aa7 | 579b72f8ad3ebcd49f746ee63992c5474ac23398 | |
refs/heads/master | <file_sep>{
"allowCache": true,
"mediaSequence": 0,
"segments": [
{
"duration": 10,
"uri": "00001.ts"
},
{
"duration": 10,
"uri": "00002.ts"
}
],
"targetDuration": 10
}
<file_sep>{
"allowCache": true,
"mediaSequence": 0,
"segments": [
{
"duration": 10,
"uri": "/test/ts-files/zencoder/gogo/00001.ts"
}
],
"endList": true
}<file_sep>(function(window) {
/*
======== A Handy Little QUnit Reference ========
http://api.qunitjs.com/
Test methods:
module(name, {[setup][ ,teardown]})
test(name, callback)
expect(numberOfAssertions)
stop(increment)
start(decrement)
Test assertions:
ok(value, [message])
equal(actual, expected, [message])
notEqual(actual, expected, [message])
deepEqual(actual, expected, [message])
notDeepEqual(actual, expected, [message])
strictEqual(actual, expected, [message])
notStrictEqual(actual, expected, [message])
throws(block, [expected], [message])
*/
var
parser,
expectedHeader = [
0x46, 0x4c, 0x56, 0x01, 0x05, 0x00, 0x00, 0x00,
0x09, 0x00, 0x00, 0x00, 0x00
],
extend = window.videojs.util.mergeOptions,
testAudioTag,
testVideoTag,
testScriptTag,
asciiFromBytes,
testScriptString,
testScriptEcmaArray,
testNalUnit;
module('segment parser', {
setup: function() {
parser = new window.videojs.Hls.SegmentParser();
}
});
test('creates an flv header', function() {
var header = Array.prototype.slice.call(parser.getFlvHeader());
ok(header, 'the header is truthy');
equal(9 + 4, header.length, 'the header length is correct');
equal(header[0], 'F'.charCodeAt(0), 'the first character is "F"');
equal(header[1], 'L'.charCodeAt(0), 'the second character is "L"');
equal(header[2], 'V'.charCodeAt(0), 'the third character is "V"');
deepEqual(expectedHeader, header, 'the rest of the header is correct');
});
test('parses PMTs with program descriptors', function() {
var
makePmt = function(options) {
var
result = [],
entryCount = 0,
k,
sectionLength;
for (k in options.pids) {
entryCount++;
}
// table_id
result.push(0x02);
// section_syntax_indicator '0' reserved section_length
// 13 + (program_info_length) + (n * 5 + ES_info_length[n])
sectionLength = 13 + (5 * entryCount) + 17;
result.push(0x80 | (0xF00 & sectionLength >>> 8));
result.push(sectionLength & 0xFF);
// program_number
result.push(0x00);
result.push(0x01);
// reserved version_number current_next_indicator
result.push(0x01);
// section_number
result.push(0x00);
// last_section_number
result.push(0x00);
// reserved PCR_PID
result.push(0xe1);
result.push(0x00);
// reserved program_info_length
result.push(0xf0);
result.push(0x11); // hard-coded 17 byte descriptor
// program descriptors
result = result.concat([
0x25, 0x0f, 0xff, 0xff,
0x49, 0x44, 0x33, 0x20,
0xff, 0x49, 0x44, 0x33,
0x20, 0x00, 0x1f, 0x00,
0x01
]);
for (k in options.pids) {
// stream_type
result.push(options.pids[k]);
// reserved elementary_PID
result.push(0xe0 | (k & 0x1f00) >>> 8);
result.push(k & 0xff);
// reserved ES_info_length
result.push(0xf0);
result.push(0x00); // ES_info_length = 0
}
// CRC_32
result.push([0x00, 0x00, 0x00, 0x00]); // invalid CRC but we don't check it
return result;
},
makePat = function(options) {
var
result = [],
k;
// table_id
result.push(0x00);
// section_syntax_indicator '0' reserved section_length
result.push(0x80);
result.push(0x0d); // section_length for one program
// transport_stream_id
result.push(0x00);
result.push(0x00);
// reserved version_number current_next_indicator
result.push(0x01); // current_next_indicator is 1
// section_number
result.push(0x00);
// last_section_number
result.push(0x00);
for (k in options.programs) {
// program_number
result.push((k & 0xFF00) >>> 8);
result.push(k & 0x00FF);
// reserved program_map_pid
result.push((options.programs[k] & 0x1f00) >>> 8);
result.push(options.programs[k] & 0xff);
}
return result;
},
makePsi = function(options) {
var result = [];
// pointer_field
if (options.payloadUnitStartIndicator) {
result.push(0x00);
}
if (options.programs) {
return result.concat(makePat(options));
}
return result.concat(makePmt(options));
},
makePacket = function(options) {
var
result = [],
settings = extend({
payloadUnitStartIndicator: true,
pid: 0x00
}, options);
// header
// sync_byte
result.push(0x47);
// transport_error_indicator payload_unit_start_indicator transport_priority PID
result.push((settings.pid & 0x1f) << 8 | 0x40);
result.push(settings.pid & 0xff);
// transport_scrambling_control adaptation_field_control continuity_counter
result.push(0x10);
result = result.concat(makePsi(settings));
// ensure the resulting packet is the correct size
result.length = window.videojs.Hls.SegmentParser.MP2T_PACKET_LENGTH;
return result;
},
h264Type = window.videojs.Hls.SegmentParser.STREAM_TYPES.h264,
adtsType = window.videojs.Hls.SegmentParser.STREAM_TYPES.adts;
parser.parseSegmentBinaryData(new Uint8Array(makePacket({
programs: {
0x01: [0x01]
}
}).concat(makePacket({
pid: 0x01,
pids: {
0x02: h264Type, // h264 video
0x03: adtsType // adts audio
}
}))));
strictEqual(parser.stream.pmtPid, 0x01, 'PMT PID is 1');
strictEqual(parser.stream.programMapTable[h264Type], 0x02, 'video is PID 2');
strictEqual(parser.stream.programMapTable[adtsType], 0x03, 'audio is PID 3');
});
test('parses the first bipbop segment', function() {
parser.parseSegmentBinaryData(window.bcSegment);
ok(parser.tagsAvailable(), 'tags are available');
});
testAudioTag = function(tag) {
var
byte = tag.bytes[11],
format = (byte & 0xF0) >>> 4,
soundRate = byte & 0x03,
soundSize = (byte & 0x2) >>> 1,
soundType = byte & 0x1,
aacPacketType = tag.bytes[12];
equal(10, format, 'the audio format is aac');
equal(3, soundRate, 'the sound rate is 44kHhz');
equal(1, soundSize, 'the sound size is 16-bit samples');
equal(1, soundType, 'the sound type is stereo');
ok(aacPacketType === 0 || aacPacketType === 1, 'aac packets should have a valid type');
};
testVideoTag = function (tag) {
var
byte = tag.bytes[11],
frameType = (byte & 0xF0) >>> 4,
codecId = byte & 0x0F,
packetType = tag.bytes[12],
compositionTime = (tag.view.getInt32(13) & 0xFFFFFF00) >> 8;
// payload starts at tag.bytes[16]
// XXX: I'm not sure that frame types 3-5 are invalid
ok(frameType === 1 || frameType === 2,
'the frame type should be valid');
equal(7, codecId, 'the codec ID is AVC for h264');
ok(packetType <= 2 && packetType >= 0, 'the packet type is within [0, 2]');
if (packetType !== 1) {
equal(0,
compositionTime,
'the composition time is zero for non-NALU packets');
}
// TODO: the rest of the bytes are an NLU unit
if (packetType === 0) {
// AVC decoder configuration record
} else {
// NAL units
testNalUnit(tag.bytes.subarray(16));
}
};
testNalUnit = function(bytes) {
var
nalHeader = bytes[0];
// unitType = nalHeader & 0x1F;
equal(0, (nalHeader & 0x80) >>> 7, 'the first bit is always 0');
// equal(90, (nalHeader & 0x60) >>> 5, 'the NAL reference indicator is something');
// ok(unitType > 0, 'NAL unit type ' + unitType + ' is greater than 0');
// ok(unitType < 22 , 'NAL unit type ' + unitType + ' is less than 22');
};
asciiFromBytes = function(bytes) {
var
string = [],
i = bytes.byteLength;
while (i--) {
string[i] = String.fromCharCode(bytes[i]);
}
return string.join('');
};
testScriptString = function(tag, offset, expected) {
var
type = tag.bytes[offset],
stringLength = tag.view.getUint16(offset + 1),
string;
equal(2, type, 'the script element is of string type');
equal(stringLength, expected.length, 'the script string length is correct');
string = asciiFromBytes(tag.bytes.subarray(offset + 3,
offset + 3 + stringLength));
equal(expected, string, 'the string value is "' + expected + '"');
};
testScriptEcmaArray = function(tag, start) {
var
numItems = tag.view.getUint32(start),
i = numItems,
offset = start + 4,
length,
type;
while (i--) {
length = tag.view.getUint16(offset);
// advance offset to the property value
offset += 2 + length;
type = tag.bytes[offset];
ok(type === 1 || type === 0,
'the ecma array property value type is number or boolean');
offset++;
if (type) {
// boolean
ok(tag.bytes[offset] === 0 || tag.bytes[offset] === 1,
'the script boolean value is 0 or 1');
offset++;
} else {
// number
ok(!isNaN(tag.view.getFloat64(offset)), 'the value is not NaN');
offset += 8;
}
}
equal(tag.bytes[offset], 0, 'the property array terminator is valid');
equal(tag.bytes[offset + 1], 0, 'the property array terminator is valid');
equal(tag.bytes[offset + 2], 9, 'the property array terminator is valid');
};
testScriptTag = function(tag) {
testScriptString(tag, 11, 'onMetaData');
// the onMetaData object is stored as an 'ecma array', an array with non-
// integer indices (i.e. a dictionary or hash-map).
equal(8, tag.bytes[24], 'onMetaData is of ecma array type');
testScriptEcmaArray(tag, 25);
};
test('the flv tags are well-formed', function() {
var
byte,
tag,
type,
currentPts = 0,
lastTime = 0;
parser.parseSegmentBinaryData(window.bcSegment);
while (parser.tagsAvailable()) {
tag = parser.getNextTag();
type = tag.bytes[0];
ok(tag.pts >= currentPts, 'presentation time stamps are increasing');
currentPts = tag.pts;
// generic flv headers
switch (type) {
case 8: ok(true, 'the type is audio');
break;
case 9: ok(true, 'the type is video');
break;
case 18: ok(true, 'the type is script');
break;
default: ok(false, 'the type (' + type + ') is unrecognized');
}
byte = (tag.view.getUint32(1) & 0xFFFFFF00) >>> 8;
equal(tag.bytes.byteLength - 11 - 4, byte, 'the size field is correct');
byte = tag.view.getUint32(5) & 0xFFFFFF00;
ok(byte >= lastTime,
'timestamp is increasing. last pts: ' + lastTime + ' this pts: ' + byte);
lastTime = byte;
// tag type-specific headers
({
8: testAudioTag,
9: testVideoTag,
18: testScriptTag
})[type](tag);
// previous tag size
equal(tag.bytes.byteLength - 4,
tag.view.getUint32(tag.bytes.byteLength - 4),
'the size of the previous tag is correct');
}
});
})(window);
<file_sep>{
"allowCache": true,
"mediaSequence": 0,
"playlistType": "EVENT",
"segments": [
{
"duration": 10,
"uri": "/test/ts-files/zencoder/haze/Haze_Mantel_President_encoded_1200-00001.ts"
},
{
"duration": 10,
"uri": "/test/ts-files/zencoder/haze/Haze_Mantel_President_encoded_1200-00002.ts"
},
{
"duration": 10,
"uri": "/test/ts-files/zencoder/haze/Haze_Mantel_President_encoded_1200-00003.ts"
},
{
"duration": 10,
"uri": "/test/ts-files/zencoder/haze/Haze_Mantel_President_encoded_1200-00004.ts"
},
{
"duration": 10,
"uri": "/test/ts-files/zencoder/haze/Haze_Mantel_President_encoded_1200-00005.ts"
},
{
"duration": 8,
"uri": "/test/ts-files/zencoder/haze/Haze_Mantel_President_encoded_1200-00006.ts"
}
],
"targetDuration": 10,
"endList": true
}<file_sep>{
"allowCache": false,
"mediaSequence": 0,
"playlistType": "VOD",
"segments": [
{
"byterange": {
"length": 522828,
"offset": 0
},
"duration": 10,
"uri": "hls_450k_video.ts"
}
],
"targetDuration": 10,
"endList": true
}<file_sep>// Karma example configuration file
// NOTE: To configure Karma tests, do the following:
// 1. Copy this file and rename the copy with a .conf.js extension, for example: karma.conf.js
// 2. Configure the properties below in your conf.js copy
// 3. Run your tests
module.exports = function(config) {
var customLaunchers = {
chrome_sl: {
singleRun: true,
base: 'SauceLabs',
browserName: 'chrome',
platform: 'Windows 7',
version: '34'
},
firefox_sl: {
singleRun: true,
base: 'SauceLabs',
browserName: 'firefox',
platform: 'Windows 8'
},
safari_sl: {
singleRun: true,
base: 'SauceLabs',
browserName: 'safari',
platform: 'OS X 10.8'
},
ipad_sl: {
singleRun: true,
base: 'SauceLabs',
browserName: 'ipad',
platform:'OS X 10.9',
version: '7.1'
},
android_sl: {
singleRun: true,
base: 'SauceLabs',
browserName: 'android',
platform:'Linux'
}
};
config.set({
// base path, that will be used to resolve files and exclude
basePath: '',
frameworks: ['qunit'],
// Set autoWatch to true if you plan to run `grunt karma` continuously, to automatically test changes as you make them.
autoWatch: false,
// Setting singleRun to true here will start up your specified browsers, run tests, and then shut down the browsers. Helpful to have in a CI environment, where you don't want to leave browsers running continuously.
singleRun: true,
// custom launchers for sauce labs
//define SL browsers
customLaunchers: customLaunchers,
// Start these browsers
browsers: ['chrome_sl', 'ipad_sl'], //Object.keys(customLaunchers),
// List of files / patterns to load in the browser
// Add any new src files to this list.
// If you add new unit tests, they will be picked up automatically by Karma,
// unless you've added them to a nested directory, in which case you should
// add their paths to this list.
files: [
'../node_modules/sinon/lib/sinon.js',
'../node_modules/sinon/lib/sinon/util/event.js',
'../node_modules/sinon/lib/sinon/util/fake_xml_http_request.js',
'../node_modules/sinon/lib/sinon/util/xhr_ie.js',
'../node_modules/sinon/lib/sinon/util/fake_timers.js',
'../node_modules/video.js/dist/video-js/video.js',
'../node_modules/videojs-contrib-media-sources/src/videojs-media-sources.js',
'../node_modules/pkcs7/dist/pkcs7.unpad.js',
'../test/karma-qunit-shim.js',
'../src/videojs-hls.js',
'../src/xhr.js',
'../src/flv-tag.js',
'../src/exp-golomb.js',
'../src/h264-stream.js',
'../src/aac-stream.js',
'../src/segment-parser.js',
'../src/stream.js',
'../src/m3u8/m3u8-parser.js',
'../src/playlist-loader.js',
'../src/decrypter.js',
'../tmp/manifests.js',
'../tmp/expected.js',
'tsSegment-bc.js',
'../src/bin-utils.js',
'../test/*.js'
],
plugins: [
'karma-qunit',
'karma-chrome-launcher',
'karma-firefox-launcher',
'karma-ie-launcher',
'karma-opera-launcher',
'karma-phantomjs-launcher',
'karma-safari-launcher',
'karma-sauce-launcher'
],
// test results reporter to use
// possible values: 'dots', 'progress', 'junit'
reporters: ['dots', 'progress'],
// web server port
port: 9876,
// cli runner port
runnerPort: 9100,
// enable / disable colors in the output (reporters and logs)
colors: true,
// level of logging
// possible values: config.LOG_DISABLE || config.LOG_ERROR || config.LOG_WARN || config.LOG_INFO || config.LOG_DEBUG
//logLevel: config.LOG_INFO,
// If browser does not capture in given timeout [ms], kill it
captureTimeout: 60000,
// global config for SauceLabs
sauceLabs: {
startConnect: true,
tunnelIdentifier: process.env.TRAVIS_JOB_NUMBER,
build: process.env.TRAVIS_BUILD_NUMBER,
testName: process.env.TRAVIS_BUILD_NUMBER + process.env.TRAVIS_BRANCH,
recordScreenshots: false
}
});
};
<file_sep>{
"allowCache": true,
"mediaSequence": 0,
"playlistType": "VOD",
"segments": [
{
"duration": 6.64,
"uri": "/test/ts-files/tvy7/8a5e2822668b5370f4eb1438b2564fb7ab12ffe1-hi720.ts"
},
{
"duration": 8,
"uri": "/test/ts-files/tvy7/56be1cef869a1c0cc8e38864ad1add17d187f051-hi720.ts"
},
{
"duration": 8,
"uri": "/test/ts-files/tvy7/549c8c77f55f049741a06596e5c1e01dacaa46d0-hi720.ts"
}
],
"targetDuration": 8
}
<file_sep># Live HLS Research
This document is a collection of notes on Live HLS implementations in the wild.
There are two varieties of Live HLS. In the first, playlists are
persistent and strictly appended to. In the alternative form, the
maximum number of segments in a playlist is relatively stable and an
old segment is removed every time a new segment becomes available.
On iOS devices, both stream types report a duration of `Infinity`. The
`currentTime` is equal to the amount of the stream that has been
played back on the device.
## Akamai HD2
## OnceLIVE
"Sliding window" live streams.
### Variant Playlists
Once variant playlists look like standard HLS variant playlists.
### Media Playlists
OnceLIVE uses "sliding window" manifests for live playback. The media
playlists do not have an `EXT-X-ENDLIST` and don't declare an
`EXT-X-PLAYLIST-TYPE`. On first request, the stream media playlist
returned four segment URLs with a starting media sequence of one,
preceded by a `EXT-X-DISCONTINUITY` tag. As playback progressed, that
number grew to 13 segment URLs, at which point it stabilized. That
would equate to a steady-state 65 second window at 5 seconds per
segment.
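One practical consequence of the sliding window is that a segment's
index shifts every time the playlist refreshes. Below is a minimal
sketch of how a client could use the `EXT-X-MEDIA-SEQUENCE` value to
keep its position stable across a refresh; the playlist objects are
assumed to expose a `mediaSequence` number, which is an assumption for
illustration rather than this project's API.
```js
// Sketch: translate a segment index across a sliding-window refresh so
// the client keeps downloading from the same point in the stream.
function translateMediaIndex(oldIndex, oldPlaylist, newPlaylist) {
  // the absolute position of a segment is its playlist's media
  // sequence number plus its index within that playlist
  var absoluteSequence = oldPlaylist.mediaSequence + oldIndex;
  // re-express that position relative to the refreshed playlist;
  // segments that have slid out of the window clamp to zero
  return Math.max(0, absoluteSequence - newPlaylist.mediaSequence);
}

// usage: index 3 in a playlist starting at sequence 7 becomes index 1
// after two segments roll off the front (new media sequence is 9)
console.log(translateMediaIndex(3, { mediaSequence: 7 }, { mediaSequence: 9 })); // 1
```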
OnceLive documentation is [available on the Unicorn Media
website](http://www.unicornmedia.com/documents/2013/02/oncelive_implementationguide.pdf).
Here's a script to quickly parse out segment URLs:
```shell
curl $ONCE_MEDIA_PLAYLIST | grep '^http'
```
An example media playlist might look something like this:
```m3u8
#EXTM3U
#EXT-X-TARGETDURATION:5
#EXT-X-MEDIA-SEQUENCE:3
#EXTINF:5,3
http://example.com/0/1/content.ts?visitguid=uuid&asseturl=http://once.example.com/asset.lrm&failoverurl=http://example.com/blank.jpg
#EXTINF:5,4
http://example.com/1/2/content.ts?visitguid=uuid&asseturl=http://once.example.com/asset.lrm&failoverurl=http://example.com/blank.jpg
#EXTINF:5,5
http://example.com/2/3/content.ts?visitguid=uuid&asseturl=http://once.example.com/asset.lrm&failoverurl=http://example.com/blank.jpg
#EXTINF:5,6
http://example.com/3/4/content.ts?visitguid=uuid&asseturl=http://once.example.com/asset.lrm&failoverurl=http://example.com/blank.jpg
```
## Zencoder Live
<file_sep>(function(window, videojs, undefined) {
'use strict';
/*
======== A Handy Little QUnit Reference ========
http://api.qunitjs.com/
Test methods:
module(name, {[setup][ ,teardown]})
test(name, callback)
expect(numberOfAssertions)
stop(increment)
start(decrement)
Test assertions:
ok(value, [message])
equal(actual, expected, [message])
notEqual(actual, expected, [message])
deepEqual(actual, expected, [message])
notDeepEqual(actual, expected, [message])
strictEqual(actual, expected, [message])
notStrictEqual(actual, expected, [message])
throws(block, [expected], [message])
*/
// see docs/hlse.md for instructions on how test data was generated
var stringFromBytes = function(bytes) {
var result = '', i;
for (i = 0; i < bytes.length; i++) {
result += String.fromCharCode(bytes[i]);
}
return result;
};
module('Decryption');
test('decrypts a single AES-128 with PKCS7 block', function() {
var
key = new Uint32Array([0, 0, 0, 0]),
initVector = key,
    // the string "howdy folks" encrypted
encrypted = new Uint8Array([
0xce, 0x90, 0x97, 0xd0,
0x08, 0x46, 0x4d, 0x18,
0x4f, 0xae, 0x01, 0x1c,
0x82, 0xa8, 0xf0, 0x67]);
  deepEqual('howdy folks',
stringFromBytes(videojs.Hls.decrypt(encrypted, key, initVector)),
'decrypted with a byte array key');
});
test('decrypts multiple AES-128 blocks with CBC', function() {
var
key = new Uint32Array([0, 0, 0, 0]),
initVector = key,
// the string "0123456789abcdef01234" encrypted
encrypted = new Uint8Array([
0x14, 0xf5, 0xfe, 0x74,
0x69, 0x66, 0xf2, 0x92,
0x65, 0x1c, 0x22, 0x88,
0xbb, 0xff, 0x46, 0x09,
0x0b, 0xde, 0x5e, 0x71,
0x77, 0x87, 0xeb, 0x84,
0xa9, 0x54, 0xc2, 0x45,
0xe9, 0x4e, 0x29, 0xb3
]);
deepEqual('0123456789abcdef01234',
stringFromBytes(videojs.Hls.decrypt(encrypted, key, initVector)),
'decrypted multiple blocks');
});
})(window, window.videojs);
<file_sep>var grunt = require('grunt'),
extname = require('path').extname;
grunt.file.recurse(process.cwd(), function(path) {
var json;
if (extname(path) === '.json') {
json = grunt.file.readJSON(path);
if (json.totalDuration) {
delete json.totalDuration;
grunt.file.write(path, JSON.stringify(json, null, ' '));
}
}
});
<file_sep>(function(window) {
'use strict';
var
sinonXhr,
clock,
requests,
videojs = window.videojs,
// Attempts to produce an absolute URL to a given relative path
// based on window.location.href
urlTo = function(path) {
return window.location.href
.split('/')
.slice(0, -1)
.concat([path])
.join('/');
};
module('Playlist Loader', {
setup: function() {
// fake XHRs
sinonXhr = sinon.useFakeXMLHttpRequest();
requests = [];
sinonXhr.onCreate = function(xhr) {
// force the XHR2 timeout polyfill
xhr.timeout = undefined;
requests.push(xhr);
};
// fake timers
clock = sinon.useFakeTimers();
},
teardown: function() {
sinonXhr.restore();
clock.restore();
}
});
test('throws if the playlist url is empty or undefined', function() {
throws(function() {
videojs.Hls.PlaylistLoader();
}, 'requires an argument');
throws(function() {
videojs.Hls.PlaylistLoader('');
}, 'does not accept the empty string');
});
test('starts without any metadata', function() {
var loader = new videojs.Hls.PlaylistLoader('master.m3u8');
strictEqual(loader.state, 'HAVE_NOTHING', 'no metadata has loaded yet');
});
test('requests the initial playlist immediately', function() {
new videojs.Hls.PlaylistLoader('master.m3u8');
strictEqual(requests.length, 1, 'made a request');
strictEqual(requests[0].url, 'master.m3u8', 'requested the initial playlist');
});
test('moves to HAVE_MASTER after loading a master playlist', function() {
var loader = new videojs.Hls.PlaylistLoader('master.m3u8');
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-STREAM-INF:\n' +
'media.m3u8\n');
ok(loader.master, 'the master playlist is available');
strictEqual(loader.state, 'HAVE_MASTER', 'the state is correct');
});
test('jumps to HAVE_METADATA when initialized with a media playlist', function() {
var
loadedmetadatas = 0,
loader = new videojs.Hls.PlaylistLoader('media.m3u8');
loader.on('loadedmetadata', function() {
loadedmetadatas++;
});
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXTINF:10,\n' +
'0.ts\n' +
'#EXT-X-ENDLIST\n');
ok(loader.master, 'infers a master playlist');
ok(loader.media(), 'sets the media playlist');
ok(loader.media().uri, 'sets the media playlist URI');
strictEqual(loader.state, 'HAVE_METADATA', 'the state is correct');
strictEqual(requests.length, 0, 'no more requests are made');
strictEqual(loadedmetadatas, 1, 'fired one loadedmetadata');
});
test('jumps to HAVE_METADATA when initialized with a live media playlist', function() {
var loader = new videojs.Hls.PlaylistLoader('media.m3u8');
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXTINF:10,\n' +
'0.ts\n');
ok(loader.master, 'infers a master playlist');
ok(loader.media(), 'sets the media playlist');
strictEqual(loader.state, 'HAVE_METADATA', 'the state is correct');
});
test('moves to HAVE_METADATA after loading a media playlist', function() {
var
loadedPlaylist = 0,
loadedMetadata = 0,
loader = new videojs.Hls.PlaylistLoader('master.m3u8');
loader.on('loadedplaylist', function() {
loadedPlaylist++;
});
loader.on('loadedmetadata', function() {
loadedMetadata++;
});
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-STREAM-INF:\n' +
'media.m3u8\n' +
'alt.m3u8\n');
strictEqual(loadedPlaylist, 1, 'fired loadedplaylist once');
strictEqual(loadedMetadata, 0, 'did not fire loadedmetadata');
strictEqual(requests.length, 1, 'requests the media playlist');
strictEqual(requests[0].method, 'GET', 'GETs the media playlist');
strictEqual(requests[0].url,
urlTo('media.m3u8'),
'requests the first playlist');
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXTINF:10,\n' +
'0.ts\n');
ok(loader.master, 'sets the master playlist');
ok(loader.media(), 'sets the media playlist');
strictEqual(loadedPlaylist, 2, 'fired loadedplaylist twice');
strictEqual(loadedMetadata, 1, 'fired loadedmetadata once');
strictEqual(loader.state, 'HAVE_METADATA', 'the state is correct');
});
test('moves to HAVE_CURRENT_METADATA when refreshing the playlist', function() {
var loader = new videojs.Hls.PlaylistLoader('live.m3u8');
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXTINF:10,\n' +
'0.ts\n');
clock.tick(10 * 1000); // 10s, one target duration
strictEqual(loader.state, 'HAVE_CURRENT_METADATA', 'the state is correct');
strictEqual(requests.length, 1, 'requested playlist');
strictEqual(requests[0].url,
urlTo('live.m3u8'),
'refreshes the media playlist');
});
test('returns to HAVE_METADATA after refreshing the playlist', function() {
var loader = new videojs.Hls.PlaylistLoader('live.m3u8');
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXTINF:10,\n' +
'0.ts\n');
clock.tick(10 * 1000); // 10s, one target duration
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXTINF:10,\n' +
'1.ts\n');
strictEqual(loader.state, 'HAVE_METADATA', 'the state is correct');
});
test('emits an error when an initial playlist request fails', function() {
var
errors = [],
loader = new videojs.Hls.PlaylistLoader('master.m3u8');
loader.on('error', function() {
errors.push(loader.error);
});
requests.pop().respond(500);
strictEqual(errors.length, 1, 'emitted one error');
strictEqual(errors[0].status, 500, 'http status is captured');
});
test('errors when an initial media playlist request fails', function() {
var
errors = [],
loader = new videojs.Hls.PlaylistLoader('master.m3u8');
loader.on('error', function() {
errors.push(loader.error);
});
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-STREAM-INF:\n' +
'media.m3u8\n');
strictEqual(errors.length, 0, 'emitted no errors');
requests.pop().respond(500);
strictEqual(errors.length, 1, 'emitted one error');
strictEqual(errors[0].status, 500, 'http status is captured');
});
// http://tools.ietf.org/html/draft-pantos-http-live-streaming-12#section-6.3.4
test('halves the refresh timeout if a playlist is unchanged' +
  ' since the last reload', function() {
new videojs.Hls.PlaylistLoader('live.m3u8');
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:0\n' +
'#EXTINF:10,\n' +
'0.ts\n');
clock.tick(10 * 1000); // trigger a refresh
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:0\n' +
'#EXTINF:10,\n' +
'0.ts\n');
clock.tick(5 * 1000); // half the default target-duration
strictEqual(requests.length, 1, 'sent a request');
strictEqual(requests[0].url,
urlTo('live.m3u8'),
'requested the media playlist');
});
test('media-sequence updates are considered a playlist change', function() {
new videojs.Hls.PlaylistLoader('live.m3u8');
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:0\n' +
'#EXTINF:10,\n' +
'0.ts\n');
clock.tick(10 * 1000); // trigger a refresh
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:1\n' +
'#EXTINF:10,\n' +
'0.ts\n');
clock.tick(5 * 1000); // half the default target-duration
strictEqual(requests.length, 0, 'no request is sent');
});
test('emits an error if a media refresh fails', function() {
var
errors = 0,
errorResponseText = 'custom error message',
loader = new videojs.Hls.PlaylistLoader('live.m3u8');
loader.on('error', function() {
errors++;
});
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:0\n' +
'#EXTINF:10,\n' +
'0.ts\n');
clock.tick(10 * 1000); // trigger a refresh
requests.pop().respond(500, null, errorResponseText);
strictEqual(errors, 1, 'emitted an error');
strictEqual(loader.error.status, 500, 'captured the status code');
strictEqual(loader.error.responseText, errorResponseText, 'captured the responseText');
});
test('switches media playlists when requested', function() {
var loader = new videojs.Hls.PlaylistLoader('master.m3u8');
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-STREAM-INF:BANDWIDTH=1\n' +
'low.m3u8\n' +
'#EXT-X-STREAM-INF:BANDWIDTH=2\n' +
'high.m3u8\n');
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:0\n' +
'#EXTINF:10,\n' +
'low-0.ts\n');
loader.media(loader.master.playlists[1]);
strictEqual(loader.state, 'SWITCHING_MEDIA', 'updated the state');
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:0\n' +
'#EXTINF:10,\n' +
'high-0.ts\n');
strictEqual(loader.state, 'HAVE_METADATA', 'switched active media');
strictEqual(loader.media(),
loader.master.playlists[1],
'updated the active media');
});
test('can switch media playlists based on URI', function() {
var loader = new videojs.Hls.PlaylistLoader('master.m3u8');
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-STREAM-INF:BANDWIDTH=1\n' +
'low.m3u8\n' +
'#EXT-X-STREAM-INF:BANDWIDTH=2\n' +
'high.m3u8\n');
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:0\n' +
'#EXTINF:10,\n' +
'low-0.ts\n');
loader.media('high.m3u8');
strictEqual(loader.state, 'SWITCHING_MEDIA', 'updated the state');
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:0\n' +
'#EXTINF:10,\n' +
'high-0.ts\n');
strictEqual(loader.state, 'HAVE_METADATA', 'switched active media');
strictEqual(loader.media(),
loader.master.playlists[1],
'updated the active media');
});
test('aborts in-flight playlist refreshes when switching', function() {
var loader = new videojs.Hls.PlaylistLoader('master.m3u8');
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-STREAM-INF:BANDWIDTH=1\n' +
'low.m3u8\n' +
'#EXT-X-STREAM-INF:BANDWIDTH=2\n' +
'high.m3u8\n');
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:0\n' +
'#EXTINF:10,\n' +
'low-0.ts\n');
clock.tick(10 * 1000);
loader.media('high.m3u8');
strictEqual(requests[0].aborted, true, 'aborted refresh request');
strictEqual(loader.state, 'SWITCHING_MEDIA', 'updated the state');
});
test('switching to the active playlist is a no-op', function() {
var loader = new videojs.Hls.PlaylistLoader('master.m3u8');
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-STREAM-INF:BANDWIDTH=1\n' +
'low.m3u8\n' +
'#EXT-X-STREAM-INF:BANDWIDTH=2\n' +
'high.m3u8\n');
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:0\n' +
'#EXTINF:10,\n' +
'low-0.ts\n' +
'#EXT-X-ENDLIST\n');
loader.media('low.m3u8');
strictEqual(requests.length, 0, 'no requests are sent');
});
test('switching to the active live playlist is a no-op', function() {
var loader = new videojs.Hls.PlaylistLoader('master.m3u8');
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-STREAM-INF:BANDWIDTH=1\n' +
'low.m3u8\n' +
'#EXT-X-STREAM-INF:BANDWIDTH=2\n' +
'high.m3u8\n');
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:0\n' +
'#EXTINF:10,\n' +
'low-0.ts\n');
loader.media('low.m3u8');
strictEqual(requests.length, 0, 'no requests are sent');
});
test('switches back to loaded playlists without re-requesting them', function() {
var loader = new videojs.Hls.PlaylistLoader('master.m3u8');
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-STREAM-INF:BANDWIDTH=1\n' +
'low.m3u8\n' +
'#EXT-X-STREAM-INF:BANDWIDTH=2\n' +
'high.m3u8\n');
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:0\n' +
'#EXTINF:10,\n' +
'low-0.ts\n' +
'#EXT-X-ENDLIST\n');
loader.media('high.m3u8');
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:0\n' +
'#EXTINF:10,\n' +
'high-0.ts\n' +
'#EXT-X-ENDLIST\n');
loader.media('low.m3u8');
strictEqual(requests.length, 0, 'no outstanding requests');
strictEqual(loader.state, 'HAVE_METADATA', 'returned to loaded playlist');
});
test('aborts outstanding requests if switching back to an already loaded playlist', function() {
var loader = new videojs.Hls.PlaylistLoader('master.m3u8');
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-STREAM-INF:BANDWIDTH=1\n' +
'low.m3u8\n' +
'#EXT-X-STREAM-INF:BANDWIDTH=2\n' +
'high.m3u8\n');
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:0\n' +
'#EXTINF:10,\n' +
'low-0.ts\n' +
'#EXT-X-ENDLIST\n');
loader.media('high.m3u8');
loader.media('low.m3u8');
strictEqual(requests.length, 1, 'requested high playlist');
ok(requests[0].aborted, 'aborted playlist request');
strictEqual(loader.state, 'HAVE_METADATA', 'returned to loaded playlist');
strictEqual(loader.media(), loader.master.playlists[0], 'switched to loaded playlist');
});
test('does not abort requests when the same playlist is re-requested', function() {
var loader = new videojs.Hls.PlaylistLoader('master.m3u8');
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-STREAM-INF:BANDWIDTH=1\n' +
'low.m3u8\n' +
'#EXT-X-STREAM-INF:BANDWIDTH=2\n' +
'high.m3u8\n');
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:0\n' +
'#EXTINF:10,\n' +
'low-0.ts\n' +
'#EXT-X-ENDLIST\n');
loader.media('high.m3u8');
loader.media('high.m3u8');
strictEqual(requests.length, 1, 'made only one request');
ok(!requests[0].aborted, 'request not aborted');
});
test('throws an error if a media switch is initiated too early', function() {
var loader = new videojs.Hls.PlaylistLoader('master.m3u8');
throws(function() {
loader.media('high.m3u8');
}, 'threw an error from HAVE_NOTHING');
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-STREAM-INF:BANDWIDTH=1\n' +
'low.m3u8\n' +
'#EXT-X-STREAM-INF:BANDWIDTH=2\n' +
'high.m3u8\n');
throws(function() {
loader.media('high.m3u8');
}, 'throws an error from HAVE_MASTER');
});
test('throws an error if a switch to an unrecognized playlist is requested', function() {
var loader = new videojs.Hls.PlaylistLoader('master.m3u8');
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-STREAM-INF:BANDWIDTH=1\n' +
'media.m3u8\n');
throws(function() {
loader.media('unrecognized.m3u8');
}, 'throws an error');
});
test('dispose cancels the refresh timeout', function() {
var loader = new videojs.Hls.PlaylistLoader('live.m3u8');
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:0\n' +
'#EXTINF:10,\n' +
'0.ts\n');
loader.dispose();
// a lot of time passes...
clock.tick(15 * 1000);
strictEqual(requests.length, 0, 'no refresh request was made');
});
test('dispose aborts pending refresh requests', function() {
var loader = new videojs.Hls.PlaylistLoader('live.m3u8');
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:0\n' +
'#EXTINF:10,\n' +
'0.ts\n');
clock.tick(10 * 1000);
loader.dispose();
ok(requests[0].aborted, 'refresh request aborted');
});
test('errors if requests take longer than 45s', function() {
var
loader = new videojs.Hls.PlaylistLoader('media.m3u8'),
errors = 0;
loader.on('error', function() {
errors++;
});
clock.tick(45 * 1000);
strictEqual(errors, 1, 'fired one error');
strictEqual(loader.error.code, 2, 'fired a network error');
});
test('triggers an event when the active media changes', function() {
var
loader = new videojs.Hls.PlaylistLoader('master.m3u8'),
mediaChanges = 0;
loader.on('mediachange', function() {
mediaChanges++;
});
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-STREAM-INF:BANDWIDTH=1\n' +
'low.m3u8\n' +
'#EXT-X-STREAM-INF:BANDWIDTH=2\n' +
'high.m3u8\n');
requests.shift().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:0\n' +
'#EXTINF:10,\n' +
'low-0.ts\n' +
'#EXT-X-ENDLIST\n');
strictEqual(mediaChanges, 0, 'initial selection is not a media change');
loader.media('high.m3u8');
strictEqual(mediaChanges, 0, 'mediachange does not fire immediately');
requests.shift().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:0\n' +
'#EXTINF:10,\n' +
'high-0.ts\n' +
'#EXT-X-ENDLIST\n');
strictEqual(mediaChanges, 1, 'fired a mediachange');
// switch back to an already loaded playlist
loader.media('low.m3u8');
strictEqual(mediaChanges, 2, 'fired a mediachange');
// trigger a no-op switch
loader.media('low.m3u8');
strictEqual(mediaChanges, 2, 'ignored a no-op media change');
});
test('does not misinterpret playlists missing newlines at the end', function() {
var loader = new videojs.Hls.PlaylistLoader('media.m3u8');
requests.shift().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:0\n' +
'#EXTINF:10,\n' +
'low-0.ts\n' +
'#EXT-X-ENDLIST'); // no newline
ok(loader.media().endList, 'flushed the final line of input');
});
})(window);
<file_sep>(function(videojs) {
module('H264 Stream');
var
nalUnitTypes = window.videojs.Hls.NALUnitType,
FlvTag = window.videojs.Hls.FlvTag;
test('metadata is generated for IDRs after a full NAL unit is written', function() {
var
h264Stream = new videojs.Hls.H264Stream(),
accessUnitDelimiter = new Uint8Array([
0x00,
0x00,
0x01,
nalUnitTypes.access_unit_delimiter_rbsp
]),
seqParamSet = new Uint8Array([
0x00,
0x00,
0x01,
0x60 | nalUnitTypes.seq_parameter_set_rbsp,
0x00, // profile_idc
0x00, // constraint_set flags
0x00, // level_idc
// seq_parameter_set_id ue(v) 0 => 1
// log2_max_frame_num_minus4 ue(v) 1 => 010
// pic_order_cnt_type ue(v) 0 => 1
// log2_max_pic_order_cnt_lsb_minus4 ue(v) 1 => 010
// max_num_ref_frames ue(v) 1 => 010
// gaps_in_frame_num_value_allowed u(1) 0
// pic_width_in_mbs_minus1 ue(v) 0 => 1
// pic_height_in_map_units_minus1 ue(v) 0 => 1
// frame_mbs_only_flag u(1) 1
// direct_8x8_inference_flag u(1) 0
// frame_cropping_flag u(1) 0
// vui_parameters_present_flag u(1) 0
// 1010 1010 0100 1110 00(00 0000)
0xAA,
0x4E,
0x00
]),
idr = new Uint8Array([
0x00,
0x00,
0x01,
nalUnitTypes.slice_layer_without_partitioning_rbsp_idr
]);
h264Stream.setNextTimeStamp(0, 0, true);
h264Stream.writeBytes(accessUnitDelimiter, 0, accessUnitDelimiter.byteLength);
h264Stream.writeBytes(seqParamSet, 0, seqParamSet.byteLength);
h264Stream.writeBytes(idr, 0, idr.byteLength);
h264Stream.setNextTimeStamp(1, 1, true);
strictEqual(h264Stream.tags.length, 3, 'three tags are written');
ok(FlvTag.isMetaData(h264Stream.tags[0].bytes),
'metadata is written');
ok(FlvTag.isVideoFrame(h264Stream.tags[1].bytes),
'picture parameter set is written');
ok(h264Stream.tags[2].keyFrame, 'key frame is written');
});
test('starting PTS values can be negative', function() {
var
h264Stream = new videojs.Hls.H264Stream(),
accessUnitDelimiter = new Uint8Array([
0x00,
0x00,
0x01,
nalUnitTypes.access_unit_delimiter_rbsp
]);
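  // feed the stream timestamps that start negative; the assertions below
  // expect the emitted tags to be rebased so the first PTS/DTS is zero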
h264Stream.setNextTimeStamp(-100, -100, true);
h264Stream.writeBytes(accessUnitDelimiter, 0, accessUnitDelimiter.byteLength);
h264Stream.setNextTimeStamp(-99, -99, true);
h264Stream.writeBytes(accessUnitDelimiter, 0, accessUnitDelimiter.byteLength);
h264Stream.setNextTimeStamp(0, 0, true);
h264Stream.writeBytes(accessUnitDelimiter, 0, accessUnitDelimiter.byteLength);
// flush out the last tag
h264Stream.writeBytes(accessUnitDelimiter, 0, accessUnitDelimiter.byteLength);
strictEqual(h264Stream.tags.length, 3, 'three tags are ready');
strictEqual(h264Stream.tags[0].pts, 0, 'the first PTS is zero');
strictEqual(h264Stream.tags[0].dts, 0, 'the first DTS is zero');
strictEqual(h264Stream.tags[1].pts, 1, 'the second PTS is one');
strictEqual(h264Stream.tags[1].dts, 1, 'the second DTS is one');
strictEqual(h264Stream.tags[2].pts, 100, 'the third PTS is 100');
strictEqual(h264Stream.tags[2].dts, 100, 'the third DTS is 100');
});
})(window.videojs);
<file_sep>(function(window, videojs, undefined) {
'use strict';
/*
======== A Handy Little QUnit Reference ========
http://api.qunitjs.com/
Test methods:
module(name, {[setup][ ,teardown]})
test(name, callback)
expect(numberOfAssertions)
stop(increment)
start(decrement)
Test assertions:
ok(value, [message])
equal(actual, expected, [message])
notEqual(actual, expected, [message])
deepEqual(actual, expected, [message])
notDeepEqual(actual, expected, [message])
strictEqual(actual, expected, [message])
notStrictEqual(actual, expected, [message])
throws(block, [expected], [message])
*/
var
player,
oldMediaSourceOpen,
oldSegmentParser,
oldSetTimeout,
oldSourceBuffer,
oldFlashSupported,
oldNativeHlsSupport,
oldDecrypt,
requests,
xhr,
createPlayer = function(options) {
var tech, video, player;
video = document.createElement('video');
document.querySelector('#qunit-fixture').appendChild(video);
player = videojs(video, {
flash: {
swf: ''
},
hls: options || {}
});
player.buffered = function() {
return videojs.createTimeRange(0, 0);
};
tech = player.el().querySelector('.vjs-tech');
tech.vjs_getProperty = function() {};
tech.vjs_setProperty = function() {};
tech.vjs_src = function() {};
tech.vjs_play = function() {};
videojs.Flash.onReady(tech.id);
return player;
},
openMediaSource = function(player) {
player.hls.mediaSource.trigger({
type: 'sourceopen'
});
// endOfStream triggers an exception if flash isn't available
player.hls.mediaSource.endOfStream = function() {};
},
standardXHRResponse = function(request) {
if (!request.url) {
return;
}
var contentType = "application/json",
// contents off the global object
manifestName = (/(?:.*\/)?(.*)\.m3u8/).exec(request.url);
if (manifestName) {
manifestName = manifestName[1];
} else {
manifestName = request.url;
}
if (/\.m3u8?/.test(request.url)) {
contentType = 'application/vnd.apple.mpegurl';
} else if (/\.ts/.test(request.url)) {
contentType = 'video/MP2T';
}
request.response = new Uint8Array([1]).buffer;
request.respond(200,
{ 'Content-Type': contentType },
window.manifests[manifestName]);
},
mockSegmentParser = function(tags) {
if (tags === undefined) {
tags = [];
}
return function() {
this.getFlvHeader = function() {
return 'flv';
};
this.parseSegmentBinaryData = function() {};
this.flushTags = function() {};
this.tagsAvailable = function() {
return tags.length;
};
this.getTags = function() {
return tags;
};
this.getNextTag = function() {
return tags.shift();
};
};
};
module('HLS', {
setup: function() {
oldMediaSourceOpen = videojs.MediaSource.open;
videojs.MediaSource.open = function() {};
// mock out Flash features for phantomjs
oldFlashSupported = videojs.Flash.isSupported;
videojs.Flash.isSupported = function() {
return true;
};
oldSourceBuffer = window.videojs.SourceBuffer;
window.videojs.SourceBuffer = function() {
this.appendBuffer = function() {};
this.abort = function() {};
};
// store functionality that some tests need to mock
oldSegmentParser = videojs.Hls.SegmentParser;
oldSetTimeout = window.setTimeout;
oldNativeHlsSupport = videojs.Hls.supportsNativeHls;
oldDecrypt = videojs.Hls.decrypt;
videojs.Hls.decrypt = function() {
return new Uint8Array([0]);
};
// fake XHRs
xhr = sinon.useFakeXMLHttpRequest();
requests = [];
xhr.onCreate = function(xhr) {
requests.push(xhr);
};
// create the test player
player = createPlayer();
},
teardown: function() {
player.dispose();
videojs.Flash.isSupported = oldFlashSupported;
videojs.MediaSource.open = oldMediaSourceOpen;
videojs.Hls.SegmentParser = oldSegmentParser;
videojs.Hls.supportsNativeHls = oldNativeHlsSupport;
videojs.Hls.decrypt = oldDecrypt;
videojs.SourceBuffer = oldSourceBuffer;
window.setTimeout = oldSetTimeout;
xhr.restore();
}
});
test('starts playing if autoplay is specified', function() {
var plays = 0;
player.options().autoplay = true;
player.src({
src: 'manifest/playlist.m3u8',
type: 'application/vnd.apple.mpegurl'
});
// make sure play() is called *after* the media source opens
player.play = function() {
plays++;
};
openMediaSource(player);
standardXHRResponse(requests[0]);
strictEqual(1, plays, 'play was called');
});
test('creates a PlaylistLoader on init', function() {
var loadedmetadata = false;
player.on('loadedmetadata', function() {
loadedmetadata = true;
});
ok(!player.hls.playlists, 'waits for set src to create the loader');
player.src({
src:'manifest/playlist.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
standardXHRResponse(requests[0]);
ok(loadedmetadata, 'loadedmetadata fires');
ok(player.hls.playlists.master, 'set the master playlist');
ok(player.hls.playlists.media(), 'set the media playlist');
ok(player.hls.playlists.media().segments, 'the segment entries are parsed');
strictEqual(player.hls.playlists.master.playlists[0],
player.hls.playlists.media(),
'the playlist is selected');
});
test('sets the duration if one is available on the playlist', function() {
var calls = 0;
player.duration = function(value) {
if (value === undefined) {
return 0;
}
calls++;
};
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
standardXHRResponse(requests[0]);
strictEqual(calls, 1, 'duration is set');
standardXHRResponse(requests[1]);
strictEqual(calls, 1, 'duration is set');
});
test('calculates the duration if needed', function() {
var durations = [];
player.duration = function(duration) {
if (duration === undefined) {
return 0;
}
durations.push(duration);
};
player.src({
src: 'http://example.com/manifest/missingExtinf.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
standardXHRResponse(requests[0]);
strictEqual(durations.length, 1, 'duration is set');
strictEqual(durations[0],
player.hls.playlists.media().segments.length * 10,
'duration is calculated');
});
test('starts downloading a segment on loadedmetadata', function() {
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
player.buffered = function() {
return videojs.createTimeRange(0, 0);
};
openMediaSource(player);
standardXHRResponse(requests[0]);
standardXHRResponse(requests[1]);
strictEqual(requests[1].url,
window.location.origin +
window.location.pathname.split('/').slice(0, -1).join('/') +
'/manifest/media-00001.ts',
'the first segment is requested');
});
test('recognizes absolute URIs and requests them unmodified', function() {
player.src({
src: 'manifest/absoluteUris.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
standardXHRResponse(requests[0]);
standardXHRResponse(requests[1]);
strictEqual(requests[1].url,
'http://example.com/00001.ts',
'the first segment is requested');
});
test('recognizes domain-relative URLs', function() {
player.src({
src: 'manifest/domainUris.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
standardXHRResponse(requests[0]);
standardXHRResponse(requests[1]);
strictEqual(requests[1].url,
window.location.origin + '/00001.ts',
'the first segment is requested');
});
test('re-initializes the tech for each source', function() {
var firstPlaylists, secondPlaylists, firstMSE, secondMSE, aborts;
aborts = 0;
player.src({
src: 'manifest/master.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
firstPlaylists = player.hls.playlists;
firstMSE = player.hls.mediaSource;
player.hls.sourceBuffer.abort = function() {
aborts++;
};
standardXHRResponse(requests.shift());
standardXHRResponse(requests.shift());
player.src({
src: 'manifest/master.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
secondPlaylists = player.hls.playlists;
secondMSE = player.hls.mediaSource;
equal(1, aborts, 'aborted the old source buffer');
ok(requests[0].aborted, 'aborted the old segment request');
notStrictEqual(firstPlaylists, secondPlaylists, 'the playlist object is not reused');
notStrictEqual(firstMSE, secondMSE, 'the media source object is not reused');
});
test('triggers an error when a master playlist request errors', function() {
var errors = 0;
player.on('error', function() {
errors++;
});
player.src({
src: 'manifest/master.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
requests.pop().respond(500);
ok(player.error(), 'an error is triggered');
strictEqual(1, errors, 'fired one error');
strictEqual(2, player.error().code, 'a network error is triggered');
});
test('downloads media playlists after loading the master', function() {
player.src({
src: 'manifest/master.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
standardXHRResponse(requests[0]);
// set bandwidth to a high number, so, we don't switch;
player.hls.bandwidth = 500000;
standardXHRResponse(requests[1]);
standardXHRResponse(requests[2]);
strictEqual(requests[0].url, 'manifest/master.m3u8', 'master playlist requested');
strictEqual(requests[1].url,
window.location.origin +
window.location.pathname.split('/').slice(0, -1).join('/') +
'/manifest/media.m3u8',
'media playlist requested');
strictEqual(requests[2].url,
window.location.origin +
window.location.pathname.split('/').slice(0, -1).join('/') +
'/manifest/media-00001.ts',
'first segment requested');
});
test('upshifts if the initial bandwidth is high', function() {
player.src({
src: 'manifest/master.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
standardXHRResponse(requests[0]);
player.hls.playlists.setBandwidth = function() {
player.hls.playlists.bandwidth = 1000000000;
};
standardXHRResponse(requests[1]);
standardXHRResponse(requests[2]);
standardXHRResponse(requests[3]);
strictEqual(requests[0].url, 'manifest/master.m3u8', 'master playlist requested');
strictEqual(requests[1].url,
window.location.origin +
window.location.pathname.split('/').slice(0, -1).join('/') +
'/manifest/media.m3u8',
'media playlist requested');
strictEqual(requests[2].url,
window.location.origin +
window.location.pathname.split('/').slice(0, -1).join('/') +
'/manifest/media3.m3u8',
'media playlist requested');
strictEqual(requests[3].url,
window.location.origin +
window.location.pathname.split('/').slice(0, -1).join('/') +
'/manifest/media3-00001.ts',
'first segment requested');
});
test('does not downshift if the bandwidth is low', function() {
player.src({
src: 'manifest/master.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
standardXHRResponse(requests[0]);
player.hls.playlists.setBandwidth = function() {
player.hls.playlists.bandwidth = 100;
};
standardXHRResponse(requests[1]);
standardXHRResponse(requests[2]);
strictEqual(requests[0].url, 'manifest/master.m3u8', 'master playlist requested');
strictEqual(requests[1].url,
window.location.origin +
window.location.pathname.split('/').slice(0, -1).join('/') +
'/manifest/media.m3u8',
'media playlist requested');
strictEqual(requests[2].url,
window.location.origin +
window.location.pathname.split('/').slice(0, -1).join('/') +
'/manifest/media-00001.ts',
'first segment requested');
});
test('timeupdates do not check to fill the buffer until a media playlist is ready', function() {
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
player.trigger('timeupdate');
strictEqual(1, requests.length, 'one request was made');
strictEqual('manifest/media.m3u8', requests[0].url, 'media playlist requested');
});
test('calculates the bandwidth after downloading a segment', function() {
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
standardXHRResponse(requests[0]);
// set the request time to be a bit earlier so our bandwidth calculations are not NaN
requests[1].requestTime = (new Date())-100;
standardXHRResponse(requests[1]);
ok(player.hls.bandwidth, 'bandwidth is calculated');
ok(player.hls.bandwidth > 0,
'bandwidth is positive: ' + player.hls.bandwidth);
ok(player.hls.segmentXhrTime >= 0,
'saves segment request time: ' + player.hls.segmentXhrTime + 's');
});
test('selects a playlist after segment downloads', function() {
var calls = 0;
player.src({
src: 'manifest/master.m3u8',
type: 'application/vnd.apple.mpegurl'
});
player.hls.selectPlaylist = function() {
calls++;
return player.hls.playlists.master.playlists[0];
};
openMediaSource(player);
standardXHRResponse(requests[0]);
player.hls.bandwidth = 3000000;
standardXHRResponse(requests[1]);
standardXHRResponse(requests[2]);
strictEqual(calls, 2, 'selects after the initial segment');
player.currentTime = function() {
return 1;
};
player.buffered = function() {
return videojs.createTimeRange(0, 2);
};
player.trigger('timeupdate');
standardXHRResponse(requests[3]);
strictEqual(calls, 3, 'selects after additional segments');
});
test('moves to the next segment if there is a network error', function() {
var mediaIndex;
player.src({
src: 'manifest/master.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
standardXHRResponse(requests[0]);
player.hls.bandwidth = 3000000;
standardXHRResponse(requests[1]);
mediaIndex = player.hls.mediaIndex;
player.trigger('timeupdate');
requests[2].respond(400);
strictEqual(mediaIndex + 1, player.hls.mediaIndex, 'media index is incremented');
});
test('updates the duration after switching playlists', function() {
var
calls = 0,
selectedPlaylist = false;
player.src({
src: 'manifest/master.m3u8',
type: 'application/vnd.apple.mpegurl'
});
player.hls.selectPlaylist = function() {
selectedPlaylist = true;
return player.hls.playlists.master.playlists[1];
};
player.duration = function(duration) {
if (duration === undefined) {
return 0;
}
// only track calls that occur after the playlist has been switched
if (player.hls.playlists.media() === player.hls.playlists.master.playlists[1]) {
calls++;
}
};
openMediaSource(player);
standardXHRResponse(requests[0]);
standardXHRResponse(requests[1]);
standardXHRResponse(requests[2]);
standardXHRResponse(requests[3]);
ok(selectedPlaylist, 'selected playlist');
strictEqual(calls, 1, 'updates the duration');
});
test('downloads additional playlists if required', function() {
var
called = false,
playlist = {
uri: 'media3.m3u8'
};
player.src({
src: 'manifest/master.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
standardXHRResponse(requests[0]);
player.hls.bandwidth = 3000000;
standardXHRResponse(requests[1]);
// before an m3u8 is downloaded, no segments are available
player.hls.selectPlaylist = function() {
if (!called) {
called = true;
return playlist;
}
playlist.segments = [1, 1, 1];
return playlist;
};
// the playlist selection is revisited after a new segment is downloaded
player.trigger('timeupdate');
standardXHRResponse(requests[2]);
standardXHRResponse(requests[3]);
strictEqual(4, requests.length, 'requests were made');
strictEqual(requests[3].url,
window.location.origin +
window.location.pathname.split('/').slice(0, -1).join('/') +
'/manifest/' +
playlist.uri,
'made playlist request');
strictEqual(playlist.uri,
player.hls.playlists.media().uri,
'a new playlists was selected');
ok(player.hls.playlists.media().segments, 'segments are now available');
});
test('selects a playlist below the current bandwidth', function() {
var playlist;
player.src({
src: 'manifest/master.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
standardXHRResponse(requests[0]);
// the default playlist has a really high bitrate
player.hls.playlists.master.playlists[0].attributes.BANDWIDTH = 9e10;
// playlist 1 has a very low bitrate
player.hls.playlists.master.playlists[1].attributes.BANDWIDTH = 1;
// but the detected client bandwidth is really low
player.hls.bandwidth = 10;
playlist = player.hls.selectPlaylist();
strictEqual(playlist,
player.hls.playlists.master.playlists[1],
'the low bitrate stream is selected');
});
test('raises the minimum bitrate for a stream proportionally', function() {
var playlist;
player.src({
src: 'manifest/master.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
standardXHRResponse(requests[0]);
// the default playlist's bandwidth + 10% is equal to the current bandwidth
player.hls.playlists.master.playlists[0].attributes.BANDWIDTH = 10;
player.hls.bandwidth = 11;
// 9.9 * 1.1 < 11
player.hls.playlists.master.playlists[1].attributes.BANDWIDTH = 9.9;
playlist = player.hls.selectPlaylist();
strictEqual(playlist,
player.hls.playlists.master.playlists[1],
'a lower bitrate stream is selected');
});
test('uses the lowest bitrate if no other is suitable', function() {
var playlist;
player.src({
src: 'manifest/master.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
standardXHRResponse(requests[0]);
// the lowest bitrate playlist is much greater than 1b/s
player.hls.bandwidth = 1;
playlist = player.hls.selectPlaylist();
// playlist 1 has the lowest advertised bitrate
strictEqual(playlist,
player.hls.playlists.master.playlists[1],
'the lowest bitrate stream is selected');
});
test('selects the correct rendition by player dimensions', function() {
var playlist;
player.src({
src: 'manifest/master.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
standardXHRResponse(requests[0]);
player.width(640);
player.height(360);
player.hls.bandwidth = 3000000;
playlist = player.hls.selectPlaylist();
deepEqual(playlist.attributes.RESOLUTION, {width:396,height:224},'should return the correct resolution by player dimensions');
equal(playlist.attributes.BANDWIDTH, 440000, 'should have the expected bandwidth in case of multiple');
player.width(1920);
player.height(1080);
player.hls.bandwidth = 3000000;
playlist = player.hls.selectPlaylist();
deepEqual(playlist.attributes.RESOLUTION, {width:960,height:540},'should return the correct resolution by player dimensions');
equal(playlist.attributes.BANDWIDTH, 1928000, 'should have the expected bandwidth in case of multiple');
});
test('does not download the next segment if the buffer is full', function() {
var currentTime = 15;
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
player.currentTime = function() {
return currentTime;
};
player.buffered = function() {
return videojs.createTimeRange(0, currentTime + videojs.Hls.GOAL_BUFFER_LENGTH);
};
openMediaSource(player);
standardXHRResponse(requests[0]);
player.trigger('timeupdate');
strictEqual(requests.length, 1, 'no segment request was made');
});
test('downloads the next segment if the buffer is getting low', function() {
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
standardXHRResponse(requests[0]);
standardXHRResponse(requests[1]);
strictEqual(requests.length, 2, 'did not make a request');
player.currentTime = function() {
return 15;
};
player.buffered = function() {
return videojs.createTimeRange(0, 19.999);
};
player.trigger('timeupdate');
standardXHRResponse(requests[2]);
strictEqual(requests.length, 3, 'made a request');
strictEqual(requests[2].url,
window.location.origin +
window.location.pathname.split('/').slice(0, -1).join('/') +
'/manifest/media-00002.ts',
'made segment request');
});
test('stops downloading segments at the end of the playlist', function() {
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
standardXHRResponse(requests[0]);
requests = [];
player.hls.mediaIndex = 4;
player.trigger('timeupdate');
strictEqual(requests.length, 0, 'no request is made');
});
test('only makes one segment request at a time', function() {
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
standardXHRResponse(requests.pop());
player.trigger('timeupdate');
strictEqual(1, requests.length, 'one XHR is made');
player.trigger('timeupdate');
strictEqual(1, requests.length, 'only one XHR is made');
});
test('cancels outstanding XHRs when seeking', function() {
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
standardXHRResponse(requests[0]);
player.hls.media = {
segments: [{
uri: '0.ts',
duration: 10
}, {
uri: '1.ts',
duration: 10
}]
};
// trigger a segment download request
player.trigger('timeupdate');
// attempt to seek while the download is in progress
player.currentTime(7);
ok(requests[1].aborted, 'XHR aborted');
strictEqual(requests.length, 3, 'opened new XHR');
});
test('when outstanding XHRs are cancelled, they get aborted properly', function() {
var readystatechanges = 0;
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
standardXHRResponse(requests[0]);
// trigger a segment download request
player.trigger('timeupdate');
player.hls.segmentXhr_.onreadystatechange = function() {
readystatechanges++;
};
// attempt to seek while the download is in progress
player.currentTime(12);
ok(requests[1].aborted, 'XHR aborted');
strictEqual(requests.length, 3, 'opened new XHR');
  notEqual(player.hls.segmentXhr_.url, requests[1].url, 'a new segment request is made that is not the aborted one');
strictEqual(readystatechanges, 0, 'onreadystatechange was not called');
});
test('segmentXhr is properly nulled out when dispose is called', function() {
var
readystatechanges = 0,
oldDispose = videojs.Flash.prototype.dispose;
videojs.Flash.prototype.dispose = function() {};
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
standardXHRResponse(requests[0]);
// trigger a segment download request
player.trigger('timeupdate');
player.hls.segmentXhr_.onreadystatechange = function() {
readystatechanges++;
};
player.hls.dispose();
ok(requests[1].aborted, 'XHR aborted');
strictEqual(requests.length, 2, 'did not open a new XHR');
equal(player.hls.segmentXhr_, null, 'the segment xhr is nulled out');
strictEqual(readystatechanges, 0, 'onreadystatechange was not called');
videojs.Flash.prototype.dispose = oldDispose;
});
test('flushes the parser after each segment', function() {
var flushes = 0;
// mock out the segment parser
videojs.Hls.SegmentParser = function() {
this.getFlvHeader = function() {
return [];
};
this.parseSegmentBinaryData = function() {};
this.flushTags = function() {
flushes++;
};
this.tagsAvailable = function() {};
};
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
standardXHRResponse(requests[0]);
standardXHRResponse(requests[1]);
strictEqual(flushes, 1, 'tags are flushed at the end of a segment');
});
test('drops tags before the target timestamp when seeking', function() {
var i = 10,
tags = [],
bytes = [];
// mock out the parser and source buffer
videojs.Hls.SegmentParser = mockSegmentParser(tags);
window.videojs.SourceBuffer = function() {
this.appendBuffer = function(chunk) {
bytes.push(chunk);
};
this.abort = function() {};
};
// push a tag into the buffer
tags.push({ pts: 0, bytes: 0 });
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
standardXHRResponse(requests[0]);
standardXHRResponse(requests[1]);
// mock out a new segment of FLV tags
bytes = [];
while (i--) {
tags.unshift({
pts: i * 1000,
bytes: i
});
}
player.currentTime(7);
standardXHRResponse(requests[2]);
deepEqual(bytes, [7,8,9], 'three tags are appended');
});
test('calls abort() on the SourceBuffer before seeking', function() {
var
aborts = 0,
bytes = [],
tags = [{ pts: 0, bytes: 0 }];
// track calls to abort()
videojs.Hls.SegmentParser = mockSegmentParser(tags);
window.videojs.SourceBuffer = function() {
this.appendBuffer = function(chunk) {
bytes.push(chunk);
};
this.abort = function() {
aborts++;
};
};
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
standardXHRResponse(requests[0]);
standardXHRResponse(requests[1]);
// drainBuffer() uses the first PTS value to account for any timestamp discontinuities in the stream
// adding a tag with a PTS of zero looks like a stream with no discontinuities
tags.push({ pts: 0, bytes: 0 });
tags.push({ pts: 7000, bytes: 7 });
// seek to 7s
player.currentTime(7);
standardXHRResponse(requests[2]);
strictEqual(1, aborts, 'aborted pending buffer');
});
test('playlist 404 should trigger MEDIA_ERR_NETWORK', function() {
var errorTriggered = false;
player.on('error', function() {
errorTriggered = true;
});
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
requests.pop().respond(404);
equal(errorTriggered,
true,
'Missing Playlist error event should trigger');
equal(player.error().code,
2,
'Player error code should be set to MediaError.MEDIA_ERR_NETWORK');
ok(player.error().message, 'included an error message');
});
test('segment 404 should trigger MEDIA_ERR_NETWORK', function () {
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
standardXHRResponse(requests[0]);
requests[1].respond(404);
ok(player.hls.error.message, 'an error message is available');
equal(2, player.hls.error.code, 'Player error code should be set to MediaError.MEDIA_ERR_NETWORK');
});
test('segment 500 should trigger MEDIA_ERR_ABORTED', function () {
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
standardXHRResponse(requests[0]);
requests[1].respond(500);
ok(player.hls.error.message, 'an error message is available');
equal(4, player.hls.error.code, 'Player error code should be set to MediaError.MEDIA_ERR_ABORTED');
});
test('duration is Infinity for live playlists', function() {
player.src({
src: 'http://example.com/manifest/missingEndlist.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
standardXHRResponse(requests[0]);
strictEqual(player.duration(), Infinity, 'duration is infinity');
ok((' ' + player.el().className + ' ').indexOf(' vjs-live ') >= 0, 'added vjs-live class');
});
test('updates the media index when a playlist reloads', function() {
player.src({
src: 'http://example.com/live-updating.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
requests[0].respond(200, null,
'#EXTM3U\n' +
'#EXTINF:10,\n' +
'0.ts\n' +
'#EXTINF:10,\n' +
'1.ts\n' +
'#EXTINF:10,\n' +
'2.ts\n');
standardXHRResponse(requests[1]);
// play the stream until 2.ts is playing
player.hls.mediaIndex = 3;
// reload the updated playlist
player.hls.playlists.media = function() {
return {
segments: [{
uri: '1.ts'
}, {
uri: '2.ts'
}, {
uri: '3.ts'
}]
};
};
player.hls.playlists.trigger('loadedplaylist');
strictEqual(player.hls.mediaIndex, 2, 'mediaIndex is updated after the reload');
});
test('live playlist starts 30s before live', function() {
player.src({
src: 'http://example.com/manifest/liveStart30sBefore.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
standardXHRResponse(requests[0]);
player.hls.playlists.trigger('loadedmetadata');
  strictEqual(player.hls.mediaIndex, 6, 'mediaIndex is set to start 30s before the live point');
});
test('mediaIndex is zero before the first segment loads', function() {
window.manifests['first-seg-load'] =
'#EXTM3U\n' +
'#EXTINF:10,\n' +
'0.ts\n';
player.src({
src: 'http://example.com/first-seg-load.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
strictEqual(player.hls.mediaIndex, 0, 'mediaIndex is zero');
});
test('reloads out-of-date live playlists when switching variants', function() {
player.src({
src: 'http://example.com/master.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
player.hls.master = {
playlists: [{
mediaSequence: 15,
segments: [1, 1, 1]
}, {
uri: 'http://example.com/variant-update.m3u8',
mediaSequence: 0,
segments: [1, 1]
}]
};
// playing segment 15 on playlist zero
player.hls.media = player.hls.master.playlists[0];
player.mediaIndex = 1;
window.manifests['variant-update'] = '#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:16\n' +
'#EXTINF:10,\n' +
'16.ts\n' +
'#EXTINF:10,\n' +
'17.ts\n';
// switch playlists
player.hls.selectPlaylist = function() {
return player.hls.master.playlists[1];
};
// timeupdate downloads segment 16 then switches playlists
player.trigger('timeupdate');
strictEqual(player.mediaIndex, 1, 'mediaIndex points at the next segment');
});
test('if withCredentials option is used, withCredentials is set on the XHR object', function() {
player.dispose();
player = createPlayer({
withCredentials: true
});
player.src({
src: 'http://example.com/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
ok(requests[0].withCredentials, "with credentials should be set to true if that option is passed in");
});
test('does not break if the playlist has no segments', function() {
player.src({
src: 'manifest/master.m3u8',
type: 'application/vnd.apple.mpegurl'
});
try {
openMediaSource(player);
requests[0].respond(200, null,
'#EXTM3U\n' +
'#EXT-X-PLAYLIST-TYPE:VOD\n' +
'#EXT-X-TARGETDURATION:10\n');
} catch(e) {
ok(false, 'an error was thrown');
throw e;
}
ok(true, 'no error was thrown');
strictEqual(requests.length, 1, 'no requests for non-existent segments were queued');
});
test('waits until the buffer is empty before appending bytes at a discontinuity', function() {
var aborts = 0, setTime, currentTime, bufferEnd;
player.src({
src: 'disc.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
player.currentTime = function() { return currentTime; };
player.buffered = function() {
return videojs.createTimeRange(0, bufferEnd);
};
player.hls.sourceBuffer.abort = function() {
aborts++;
};
player.hls.el().vjs_setProperty = function(name, value) {
if (name === 'currentTime') {
return setTime = value;
}
};
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXTINF:10,0\n' +
'1.ts\n' +
'#EXT-X-DISCONTINUITY\n' +
'#EXTINF:10,0\n' +
'2.ts\n');
standardXHRResponse(requests.pop());
// play to 6s to trigger the next segment request
currentTime = 6;
bufferEnd = 10;
player.trigger('timeupdate');
strictEqual(aborts, 0, 'no aborts before the buffer empties');
standardXHRResponse(requests.pop());
strictEqual(aborts, 0, 'no aborts before the buffer empties');
// pretend the buffer has emptied
player.trigger('waiting');
strictEqual(aborts, 1, 'aborted before appending the new segment');
strictEqual(setTime, 10, 'updated the time after crossing the discontinuity');
});
test('clears the segment buffer on seek', function() {
var aborts = 0, tags = [], currentTime, bufferEnd, oldCurrentTime;
videojs.Hls.SegmentParser = mockSegmentParser(tags);
player.src({
src: 'disc.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
oldCurrentTime = player.currentTime;
player.currentTime = function(time) {
if (time !== undefined) {
return oldCurrentTime.call(player, time);
}
return currentTime;
};
player.buffered = function() {
return videojs.createTimeRange(0, bufferEnd);
};
player.hls.sourceBuffer.abort = function() {
aborts++;
};
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXTINF:10,0\n' +
'1.ts\n' +
'#EXT-X-DISCONTINUITY\n' +
'#EXTINF:10,0\n' +
'2.ts\n');
standardXHRResponse(requests.pop());
// play to 6s to trigger the next segment request
currentTime = 6;
bufferEnd = 10;
player.trigger('timeupdate');
standardXHRResponse(requests.pop());
// seek back to the beginning
player.currentTime(0);
tags.push({ pts: 0, bytes: 0 });
standardXHRResponse(requests.pop());
strictEqual(aborts, 1, 'aborted once for the seek');
// the source buffer empties. is 2.ts still in the segment buffer?
player.trigger('waiting');
strictEqual(aborts, 1, 'cleared the segment buffer on a seek');
});
test('resets the switching algorithm if a request times out', function() {
player.src({
src: 'master.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
standardXHRResponse(requests.shift()); // master
player.hls.bandwidth = 3000000;
standardXHRResponse(requests.shift()); // media.m3u8
// simulate a segment timeout
requests[0].timedout = true;
requests.shift().abort();
standardXHRResponse(requests.shift());
strictEqual(player.hls.playlists.media(),
player.hls.playlists.master.playlists[1],
'reset to the lowest bitrate playlist');
});
test('disposes the playlist loader', function() {
var disposes = 0, player, loaderDispose;
player = createPlayer();
player.src({
src: 'manifest/master.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
loaderDispose = player.hls.playlists.dispose;
player.hls.playlists.dispose = function() {
disposes++;
loaderDispose.call(player.hls.playlists);
};
player.dispose();
strictEqual(disposes, 1, 'disposed playlist loader');
});
test('remove event handlers on dispose', function() {
var
player,
onhandlers = 0,
offhandlers = 0,
oldOn,
oldOff;
player = createPlayer();
oldOn = player.on;
oldOff = player.off;
player.on = function(type, handler) {
onhandlers++;
oldOn.call(player, type, handler);
};
player.off = function(type, handler) {
// ignore the top-level videojs removals that aren't relevant to HLS
if (type && type !== 'dispose') {
offhandlers++;
}
oldOff.call(player, type, handler);
};
player.src({
src: 'manifest/master.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
standardXHRResponse(requests[0]);
standardXHRResponse(requests[1]);
player.dispose();
equal(offhandlers, onhandlers, 'the amount of on and off handlers is the same');
player.off = oldOff;
player.on = oldOn;
});
test('aborts the source buffer on disposal', function() {
var aborts = 0, player;
player = createPlayer();
player.src({
src: 'manifest/master.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
player.hls.sourceBuffer.abort = function() {
aborts++;
};
player.dispose();
strictEqual(aborts, 1, 'aborted the source buffer');
});
test('only supports HLS MIME types', function() {
ok(videojs.Hls.canPlaySource({
type: 'aPplicatiOn/x-MPegUrl'
}), 'supports x-mpegurl');
ok(videojs.Hls.canPlaySource({
type: 'aPplicatiOn/VnD.aPPle.MpEgUrL'
}), 'supports vnd.apple.mpegurl');
ok(!videojs.Hls.canPlaySource({
type: 'video/mp4'
}), 'does not support mp4');
ok(!videojs.Hls.canPlaySource({
type: 'video/x-flv'
}), 'does not support flv');
});
test('adds Hls to the default tech order', function() {
strictEqual(videojs.options.techOrder[0], 'hls', 'first entry is Hls');
});
test('has no effect if native HLS is available', function() {
var player;
videojs.Hls.supportsNativeHls = true;
player = createPlayer();
player.src({
src: 'http://example.com/manifest/master.m3u8',
type: 'application/x-mpegURL'
});
ok(!player.hls, 'did not load hls tech');
player.dispose();
});
test('is not supported on browsers without typed arrays', function() {
var oldArray = window.Uint8Array;
window.Uint8Array = null;
ok(!videojs.Hls.isSupported(), 'HLS is not supported');
// cleanup
window.Uint8Array = oldArray;
});
test('tracks the bytes downloaded', function() {
player.src({
src: 'http://example.com/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
strictEqual(player.hls.bytesReceived, 0, 'no bytes received');
requests.shift().respond(200, null,
'#EXTM3U\n' +
'#EXTINF:10,\n' +
'0.ts\n' +
'#EXTINF:10,\n' +
'1.ts\n' +
'#EXT-X-ENDLIST\n');
// transmit some segment bytes
requests[0].response = new ArrayBuffer(17);
requests.shift().respond(200, null, '');
strictEqual(player.hls.bytesReceived, 17, 'tracked bytes received');
player.trigger('timeupdate');
// transmit some more
requests[0].response = new ArrayBuffer(5);
requests.shift().respond(200, null, '');
strictEqual(player.hls.bytesReceived, 22, 'tracked more bytes');
});
test('re-emits mediachange events', function() {
var mediaChanges = 0;
player.on('mediachange', function() {
mediaChanges++;
});
player.src({
src: 'http://example.com/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
player.hls.playlists.trigger('mediachange');
strictEqual(mediaChanges, 1, 'fired mediachange');
});
test('can be disposed before finishing initialization', function() {
var player = createPlayer(), readyHandlers = [];
player.ready = function(callback) {
readyHandlers.push(callback);
};
player.src({
src: 'http://example.com/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
player.src({
src: 'http://example.com/media.mp4',
type: 'video/mp4'
});
ok(readyHandlers.length > 0, 'registered a ready handler');
try {
while (readyHandlers.length) {
readyHandlers.shift().call(player);
}
ok(true, 'did not throw an exception');
} catch (e) {
ok(false, 'threw an exception');
}
});
test('calls ended() on the media source at the end of a playlist', function() {
var endOfStreams = 0;
player.src({
src: 'http://example.com/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
player.hls.mediaSource.endOfStream = function() {
endOfStreams++;
};
// playlist response
requests.shift().respond(200, null,
'#EXTM3U\n' +
'#EXTINF:10,\n' +
'0.ts\n' +
'#EXT-X-ENDLIST\n');
// segment response
requests[0].response = new ArrayBuffer(17);
requests.shift().respond(200, null, '');
strictEqual(endOfStreams, 1, 'ended media source');
});
test('calling play() at the end of a video resets the media index', function() {
player.src({
src: 'http://example.com/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
requests.shift().respond(200, null,
'#EXTM3U\n' +
'#EXTINF:10,\n' +
'0.ts\n' +
'#EXT-X-ENDLIST\n');
standardXHRResponse(requests.shift());
strictEqual(player.hls.mediaIndex, 1, 'index is 1 after the first segment');
player.hls.ended = function() {
return true;
};
player.play();
strictEqual(player.hls.mediaIndex, 0, 'index is reset to 0 after the video ends');
});
test('calling fetchKeys() when a new playlist is loaded will create an XHR', function() {
player.src({
src: 'https://example.com/encrypted-media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
var oldMedia = player.hls.playlists.media;
player.hls.playlists.media = function() {
return {
segments: [{
key: {
'method': 'AES-128',
'uri': 'https://priv.example.com/key.php?r=52'
},
uri: 'http://media.example.com/fileSequence52-A.ts'
}, {
key: {
'method': 'AES-128',
'uri': 'https://priv.example.com/key.php?r=53'
},
uri: 'http://media.example.com/fileSequence53-B.ts'
}]
};
};
player.hls.playlists.trigger('loadedplaylist');
strictEqual(requests.length, 2, 'a key XHR is created');
strictEqual(requests[1].url, player.hls.playlists.media().segments[0].key.uri, 'a key XHR is created with correct uri');
player.hls.playlists.media = oldMedia;
});
test('a new keys XHR is created when a previous key XHR finishes', function() {
player.src({
src: 'https://example.com/encrypted-media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
var oldMedia = player.hls.playlists.media;
player.hls.playlists.media = function() {
return {
segments: [{
key: {
'method': 'AES-128',
'uri': 'https://priv.example.com/key.php?r=52'
},
uri: 'http://media.example.com/fileSequence52-A.ts'
}, {
key: {
'method': 'AES-128',
'uri': 'https://priv.example.com/key.php?r=53'
},
uri: 'http://media.example.com/fileSequence53-B.ts'
}]
};
};
// we're injecting the media playlist, so drop the request
requests.shift();
player.hls.playlists.trigger('loadedplaylist');
// key response
requests[0].response = new Uint32Array([0, 0, 0, 0]).buffer;
requests.shift().respond(200, null, '');
strictEqual(requests.length, 1, 'a key XHR is created');
strictEqual(requests[0].url, player.hls.playlists.media().segments[1].key.uri, 'a key XHR is created with the correct uri');
player.hls.playlists.media = oldMedia;
});
test('calling fetchKeys() when a seek happens will create an XHR', function() {
player.src({
src: 'https://example.com/encrypted-media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
var oldMedia = player.hls.playlists.media;
player.hls.playlists.media = function() {
return {
segments: [{
duration: 10,
key: {
'method': 'AES-128',
'uri': 'https://priv.example.com/key.php?r=52'
},
uri: 'http://media.example.com/fileSequence52-A.ts'
}, {
duration: 10,
key: {
'method': 'AES-128',
'uri': 'https://priv.example.com/key.php?r=53'
},
uri: 'http://media.example.com/fileSequence53-B.ts'
}]
};
};
player.hls.fetchKeys(player.hls.playlists.media(), 0);
player.currentTime(11);
ok(requests[1].aborted, 'the key XHR should be aborted');
equal(requests.length, 3, 'we should get a new key XHR');
equal(requests[2].url, player.hls.playlists.media().segments[1].key.uri, 'urls should match');
player.hls.playlists.media = oldMedia;
});
test('calling fetchKeys() when a key XHR is in progress will *not* create an XHR', function() {
player.src({
src: 'https://example.com/encrypted-media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
var oldMedia = player.hls.playlists.media;
player.hls.playlists.media = function() {
return {
segments: [{
key: {
'method': 'AES-128',
'uri': 'https://priv.example.com/key.php?r=52'
},
uri: 'http://media.example.com/fileSequence52-A.ts'
}, {
key: {
'method': 'AES-128',
'uri': 'https://priv.example.com/key.php?r=53'
},
uri: 'http://media.example.com/fileSequence53-B.ts'
}]
};
};
strictEqual(requests.length, 1, 'no key XHR created for the player');
player.hls.playlists.trigger('loadedplaylist');
player.hls.fetchKeys(player.hls.playlists.media(), 0);
strictEqual(requests.length, 2, 'only the original XHR is available');
player.hls.playlists.media = oldMedia;
});
test('calling fetchKeys() when all keys are fetched, will *not* create an XHR', function() {
player.src({
src: 'https://example.com/encrypted-media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
var oldMedia = player.hls.playlists.media;
player.hls.playlists.media = function() {
return {
segments: [{
key: {
'method': 'AES-128',
'uri': 'https://priv.example.com/key.php?r=52',
bytes: new Uint8Array([1])
},
uri: 'http://media.example.com/fileSequence52-A.ts'
}, {
key: {
'method': 'AES-128',
'uri': 'https://priv.example.com/key.php?r=53',
bytes: new Uint8Array([1])
},
uri: 'http://media.example.com/fileSequence53-B.ts'
}]
};
};
player.hls.fetchKeys(player.hls.playlists.media(), 0);
strictEqual(requests.length, 1, 'no XHR for keys created since they were all downloaded');
player.hls.playlists.media = oldMedia;
});
test('retries key requests once upon failure', function() {
player.src({
src: 'https://example.com/encrypted-media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
var oldMedia = player.hls.playlists.media;
player.hls.playlists.media = function() {
return {
segments: [{
key: {
'method': 'AES-128',
'uri': 'https://priv.example.com/key.php?r=52'
},
uri: 'http://media.example.com/fileSequence52-A.ts'
}, {
key: {
'method': 'AES-128',
'uri': 'https://priv.example.com/key.php?r=53'
},
uri: 'http://media.example.com/fileSequence53-B.ts'
}]
};
};
player.hls.fetchKeys(player.hls.playlists.media(), 0);
requests[1].respond(404);
equal(requests.length, 3, 'create a new XHR for the same key');
equal(requests[2].url, requests[1].url, 'should be the same key');
requests[2].respond(404);
equal(requests.length, 4, 'create a new XHR for the same key');
notEqual(requests[3].url, requests[2].url, 'should not be the same key');
equal(requests[3].url, player.hls.playlists.media().segments[1].key.uri);
player.hls.playlists.media = oldMedia;
});
test('skip segments if key requests fail more than once', function() {
var bytes = [],
tags = [{ pts: 0, bytes: 0 }];
videojs.Hls.SegmentParser = mockSegmentParser(tags);
window.videojs.SourceBuffer = function() {
this.appendBuffer = function(chunk) {
bytes.push(chunk);
};
this.abort = function() {};
};
player.src({
src: 'https://example.com/encrypted-media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-KEY:METHOD=AES-128,URI="htts://priv.example.com/key.php?r=52"\n' +
'#EXTINF:2.833,\n' +
'http://media.example.com/fileSequence52-A.ts\n' +
'#EXT-X-KEY:METHOD=AES-128,URI="htts://priv.example.com/key.php?r=53"\n' +
'#EXTINF:15.0,\n' +
'http://media.example.com/fileSequence53-A.ts\n');
player.hls.playlists.trigger('loadedplaylist');
player.trigger('timeupdate');
// respond to ts segment
standardXHRResponse(requests.pop());
// fail key
requests.pop().respond(404);
// fail key, again
requests.pop().respond(404);
// key for second segment
requests[0].response = new Uint32Array([0,0,0,0]).buffer;
requests[0].respond(200, null, '');
requests.shift();
equal(bytes.length, 1, 'bytes from the ts segments should not be added');
player.trigger('timeupdate');
tags.length = 0;
tags.push({pts: 0, bytes: 1});
// second segment
standardXHRResponse(requests.pop());
equal(bytes.length, 2, 'bytes from the second ts segment should be added');
equal(bytes[1], 1, 'the bytes from the second segment are added and not the first');
});
test('the key is supplied to the decrypter in the correct format', function() {
var keys = [];
player.src({
src: 'https://example.com/encrypted-media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:5\n' +
'#EXT-X-KEY:METHOD=AES-128,URI="htts://priv.example.com/key.php?r=52"\n' +
'#EXTINF:2.833,\n' +
'http://media.example.com/fileSequence52-A.ts\n' +
'#EXTINF:15.0,\n' +
'http://media.example.com/fileSequence52-B.ts\n');
videojs.Hls.decrypt = function(bytes, key) {
keys.push(key);
return new Uint8Array([0]);
};
requests[0].response = new Uint32Array([0,1,2,3]).buffer;
requests[0].respond(200, null, '');
requests.shift();
standardXHRResponse(requests.pop());
equal(keys.length, 1, 'only one call to decrypt was made');
deepEqual(keys[0],
new Uint32Array([0, 0x01000000, 0x02000000, 0x03000000]),
'passed the specified segment key');
});
test('supplies the media sequence of current segment as the IV by default, if no IV is specified', function() {
var ivs = [];
player.src({
src: 'https://example.com/encrypted-media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:5\n' +
'#EXT-X-KEY:METHOD=AES-128,URI="htts://priv.example.com/key.php?r=52"\n' +
'#EXTINF:2.833,\n' +
'http://media.example.com/fileSequence52-A.ts\n' +
'#EXTINF:15.0,\n' +
'http://media.example.com/fileSequence52-B.ts\n');
videojs.Hls.decrypt = function(bytes, key, iv) {
ivs.push(iv);
return new Uint8Array([0]);
};
requests[0].response = new Uint32Array([0,0,0,0]).buffer;
requests[0].respond(200, null, '');
requests.shift();
standardXHRResponse(requests.pop());
equal(ivs.length, 1, 'only one call to decrypt was made');
deepEqual(ivs[0],
new Uint32Array([0, 0, 0, 5]),
'the IV for the segment is the media sequence');
});
test('switching playlists with an outstanding key request does not stall playback', function() {
var media = '#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:5\n' +
'#EXT-X-KEY:METHOD=AES-128,URI="https://priv.example.com/key.php?r=52"\n' +
'#EXTINF:2.833,\n' +
'http://media.example.com/fileSequence52-A.ts\n' +
'#EXTINF:15.0,\n' +
'http://media.example.com/fileSequence52-B.ts\n';
player.src({
src: 'https://example.com/master.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
// master playlist
standardXHRResponse(requests.shift());
// media playlist
requests.shift().respond(200, null, media);
// mock out media switching from this point on
player.hls.playlists.media = function() {
return player.hls.playlists.master.playlists[0];
};
// don't respond to the initial key request
requests.shift();
// first segment of the original media playlist
standardXHRResponse(requests.shift());
// "switch" media
player.hls.playlists.trigger('mediachange');
player.trigger('timeupdate');
ok(requests.length, 'made a request');
equal(requests[0].url,
'https://priv.example.com/key.php?r=52',
'requested the segment and key');
});
test('resolves relative key URLs against the playlist', function() {
player.src({
src: 'https://example.com/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
requests.shift().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:5\n' +
'#EXT-X-KEY:METHOD=AES-128,URI="key.php?r=52"\n' +
'#EXTINF:2.833,\n' +
'http://media.example.com/fileSequence52-A.ts\n');
equal(requests[0].url, 'https://example.com/key.php?r=52', 'resolves the key URL');
});
test('treats invalid keys as a key request failure', function() {
var tags = [{ pts: 0, bytes: 0 }], bytes = [];
videojs.Hls.SegmentParser = mockSegmentParser(tags);
window.videojs.SourceBuffer = function() {
this.appendBuffer = function(chunk) {
bytes.push(chunk);
};
this.abort = function() {};
};
player.src({
src: 'https://example.com/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
requests.shift().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:5\n' +
'#EXT-X-KEY:METHOD=AES-128,URI="https://priv.example.com/key.php?r=52"\n' +
'#EXTINF:2.833,\n' +
'http://media.example.com/fileSequence52-A.ts\n' +
'#EXT-X-KEY:METHOD=NONE\n' +
'#EXTINF:15.0,\n' +
'http://media.example.com/fileSequence52-B.ts\n');
// keys should be 16 bytes long
requests[0].response = new Uint8Array(1).buffer;
requests.shift().respond(200, null, '');
// segment request
standardXHRResponse(requests.shift());
equal(requests[0].url, 'https://priv.example.com/key.php?r=52', 'retries the key');
// the retried response is invalid, too
requests[0].response = new Uint8Array(1);
requests.shift().respond(200, null, '');
// the first segment should be dropped and playback moves on
player.trigger('timeupdate');
equal(bytes.length, 1, 'did not append bytes');
equal(bytes[0], 'flv', 'appended the flv header');
tags.length = 0;
tags.push({ pts: 1, bytes: 1 });
// second segment request
standardXHRResponse(requests.shift());
equal(bytes.length, 2, 'appended bytes');
equal(bytes[1], 1, 'skipped to the second segment');
});
})(window, window.videojs);
<file_sep>/*
* videojs-hls
*
* Copyright (c) 2014 Brightcove
* All rights reserved.
*
* This file contains an adaptation of the AES decryption algorithm
* from the Stanford JavaScript Cryptography Library. That work is
* covered by the following copyright and permissions notice:
*
* Copyright 2009-2010 <NAME>, <NAME>, <NAME>.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation
* are those of the authors and should not be interpreted as representing
* official policies, either expressed or implied, of the authors.
*/
(function(window, videojs, unpad) {
'use strict';
var AES, decrypt;
/**
* Schedule out an AES key for both encryption and decryption. This
* is a low-level class. Use a cipher mode to do bulk encryption.
*
* @constructor
* @param key {Array} The key as an array of 4, 6 or 8 words.
*/
AES = function (key) {
this._precompute();
var i, j, tmp,
encKey, decKey,
sbox = this._tables[0][4], decTable = this._tables[1],
keyLen = key.length, rcon = 1;
if (keyLen !== 4 && keyLen !== 6 && keyLen !== 8) {
throw new Error("Invalid aes key size");
}
encKey = key.slice(0);
decKey = [];
this._key = [encKey, decKey];
// schedule encryption keys
for (i = keyLen; i < 4 * keyLen + 28; i++) {
tmp = encKey[i-1];
// apply sbox
if (i%keyLen === 0 || (keyLen === 8 && i%keyLen === 4)) {
tmp = sbox[tmp>>>24]<<24 ^ sbox[tmp>>16&255]<<16 ^ sbox[tmp>>8&255]<<8 ^ sbox[tmp&255];
// shift rows and add rcon
if (i%keyLen === 0) {
tmp = tmp<<8 ^ tmp>>>24 ^ rcon<<24;
rcon = rcon<<1 ^ (rcon>>7)*283;
}
}
encKey[i] = encKey[i-keyLen] ^ tmp;
}
// schedule decryption keys
for (j = 0; i; j++, i--) {
tmp = encKey[j&3 ? i : i - 4];
if (i<=4 || j<4) {
decKey[j] = tmp;
} else {
decKey[j] = decTable[0][sbox[tmp>>>24 ]] ^
decTable[1][sbox[tmp>>16 & 255]] ^
decTable[2][sbox[tmp>>8 & 255]] ^
decTable[3][sbox[tmp & 255]];
}
}
};
AES.prototype = {
/**
* The expanded S-box and inverse S-box tables. These will be computed
* on the client so that we don't have to send them down the wire.
*
* There are two tables, _tables[0] is for encryption and
* _tables[1] is for decryption.
*
* The first 4 sub-tables are the expanded S-box with MixColumns. The
* last (_tables[01][4]) is the S-box itself.
*
* @private
*/
_tables: [[[],[],[],[],[]],[[],[],[],[],[]]],
/**
* Expand the S-box tables.
*
* @private
*/
_precompute: function () {
var encTable = this._tables[0], decTable = this._tables[1],
sbox = encTable[4], sboxInv = decTable[4],
i, x, xInv, d=[], th=[], x2, x4, x8, s, tEnc, tDec;
// Compute double and third tables
for (i = 0; i < 256; i++) {
th[( d[i] = i<<1 ^ (i>>7)*283 )^i]=i;
}
for (x = xInv = 0; !sbox[x]; x ^= x2 || 1, xInv = th[xInv] || 1) {
// Compute sbox
s = xInv ^ xInv<<1 ^ xInv<<2 ^ xInv<<3 ^ xInv<<4;
s = s>>8 ^ s&255 ^ 99;
sbox[x] = s;
sboxInv[s] = x;
// Compute MixColumns
x8 = d[x4 = d[x2 = d[x]]];
tDec = x8*0x1010101 ^ x4*0x10001 ^ x2*0x101 ^ x*0x1010100;
tEnc = d[s]*0x101 ^ s*0x1010100;
for (i = 0; i < 4; i++) {
encTable[i][x] = tEnc = tEnc<<24 ^ tEnc>>>8;
decTable[i][s] = tDec = tDec<<24 ^ tDec>>>8;
}
}
// Compactify. Considerable speedup on Firefox.
for (i = 0; i < 5; i++) {
encTable[i] = encTable[i].slice(0);
decTable[i] = decTable[i].slice(0);
}
},
/**
* Decrypt an array of 4 big-endian words.
* @param {Array} data The ciphertext.
* @return {Array} The plaintext.
*/
decrypt:function (input) {
if (input.length !== 4) {
throw new Error("Invalid aes block size");
}
var key = this._key[1],
// state variables a,b,c,d are loaded with pre-whitened data
a = input[0] ^ key[0],
b = input[3] ^ key[1],
c = input[2] ^ key[2],
d = input[1] ^ key[3],
a2, b2, c2,
nInnerRounds = key.length/4 - 2,
i,
kIndex = 4,
out = [0,0,0,0],
table = this._tables[1],
// load up the tables
t0 = table[0],
t1 = table[1],
t2 = table[2],
t3 = table[3],
sbox = table[4];
// Inner rounds. Cribbed from OpenSSL.
for (i = 0; i < nInnerRounds; i++) {
a2 = t0[a>>>24] ^ t1[b>>16 & 255] ^ t2[c>>8 & 255] ^ t3[d & 255] ^ key[kIndex];
b2 = t0[b>>>24] ^ t1[c>>16 & 255] ^ t2[d>>8 & 255] ^ t3[a & 255] ^ key[kIndex + 1];
c2 = t0[c>>>24] ^ t1[d>>16 & 255] ^ t2[a>>8 & 255] ^ t3[b & 255] ^ key[kIndex + 2];
d = t0[d>>>24] ^ t1[a>>16 & 255] ^ t2[b>>8 & 255] ^ t3[c & 255] ^ key[kIndex + 3];
kIndex += 4;
a=a2; b=b2; c=c2;
}
// Last round.
for (i = 0; i < 4; i++) {
out[3 & -i] =
sbox[a>>>24 ]<<24 ^
sbox[b>>16 & 255]<<16 ^
sbox[c>>8 & 255]<<8 ^
sbox[d & 255] ^
key[kIndex++];
a2=a; a=b; b=c; c=d; d=a2;
}
return out;
}
};
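/**
 * Illustrative sketch only (not part of the library): decrypting one 16-byte
 * block with a hypothetical 128-bit key. Both the key and the block are
 * arrays of four 32-bit big-endian words, as documented above.
 *
 *   var aes = new AES([0x00010203, 0x04050607, 0x08090a0b, 0x0c0d0e0f]);
 *   var words = aes.decrypt([0x11111111, 0x22222222, 0x33333333, 0x44444444]);
 *   // `words` is an array of four plaintext words; CBC chaining and PKCS#7
 *   // unpadding are applied by the decrypt() helper defined below.
 */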
decrypt = function(encrypted, key, initVector) {
var
encryptedView = new DataView(encrypted.buffer),
platformEndian = new Uint32Array(encrypted.byteLength / 4),
decipher = new AES(Array.prototype.slice.call(key)),
decrypted = new Uint8Array(encrypted.byteLength),
decryptedView = new DataView(decrypted.buffer),
decryptedBlock,
word,
byte;
// convert big-endian input to platform byte order for decryption
for (byte = 0; byte < encrypted.byteLength; byte += 4) {
platformEndian[byte >>> 2] = encryptedView.getUint32(byte);
}
// decrypt four word sequences, applying cipher-block chaining (CBC)
// to each decrypted block
for (word = 0; word < platformEndian.length; word += 4) {
// decrypt the block
decryptedBlock = decipher.decrypt(platformEndian.subarray(word, word + 4));
// XOR with the IV, and restore network byte-order to obtain the
// plaintext
byte = word << 2;
decryptedView.setUint32(byte, decryptedBlock[0] ^ initVector[0]);
decryptedView.setUint32(byte + 4, decryptedBlock[1] ^ initVector[1]);
decryptedView.setUint32(byte + 8, decryptedBlock[2] ^ initVector[2]);
decryptedView.setUint32(byte + 12, decryptedBlock[3] ^ initVector[3]);
// setup the IV for the next round
initVector = platformEndian.subarray(word, word + 4);
}
// remove any padding
return unpad(decrypted);
};
// exports
videojs.Hls.decrypt = decrypt;
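// Usage sketch (assumptions: the ciphertext length is a multiple of 16 bytes and
// the key/IV are 4-word Uint32Arrays, mirroring the test fixtures earlier in this
// repository; `segmentBytes` and `mediaSequence` are hypothetical values):
//
//   var encrypted = new Uint8Array(segmentBytes);
//   var key = new Uint32Array([0, 1, 2, 3]);
//   var iv = new Uint32Array([0, 0, 0, mediaSequence]); // HLS default IV: the media sequence
//   var plaintext = videojs.Hls.decrypt(encrypted, key, iv); // unpadded Uint8Array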
})(window, window.videojs, window.pkcs7.unpad);
<file_sep>(function(window) {
/*
======== A Handy Little QUnit Reference ========
http://api.qunitjs.com/
Test methods:
module(name, {[setup][ ,teardown]})
test(name, callback)
expect(numberOfAssertions)
stop(increment)
start(decrement)
Test assertions:
ok(value, [message])
equal(actual, expected, [message])
notEqual(actual, expected, [message])
deepEqual(actual, expected, [message])
notDeepEqual(actual, expected, [message])
strictEqual(actual, expected, [message])
notStrictEqual(actual, expected, [message])
throws(block, [expected], [message])
*/
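/*
  Minimal illustration of the pattern used below (not an actual test in this
  file):

    module('example');
    test('adds numbers', function() {
      equal(1 + 1, 2, 'one plus one is two');
    });
*/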
var FlvTag = window.videojs.Hls.FlvTag;
module('FLV tag');
test('writeBytes with zero length writes the entire array', function() {
var
tag = new FlvTag(FlvTag.VIDEO_TAG),
headerLength = tag.length;
tag.writeBytes(new Uint8Array([0x1, 0x2, 0x3]));
equal(3 + headerLength, tag.length, '3 payload bytes are written');
});
test('writeShort writes a two byte sequence', function() {
var
tag = new FlvTag(FlvTag.VIDEO_TAG),
headerLength = tag.length;
tag.writeShort(0x0102);
equal(2 + headerLength, tag.length, '2 bytes are written');
equal(0x0102,
new DataView(tag.bytes.buffer).getUint16(tag.length - 2),
'the value is written');
});
test('writeBytes grows the internal byte array dynamically', function() {
var
tag = new FlvTag(FlvTag.VIDEO_TAG),
tooManyBytes = new Uint8Array(tag.bytes.byteLength + 1);
try {
tag.writeBytes(tooManyBytes);
ok(true, 'the buffer grew to fit the data');
} catch(e) {
ok(!e, 'the buffer should grow');
}
});
})(this);
| 637b8d66457319d870f57a09cee55b9fb6466cb3 | [
"JavaScript",
"Markdown"
] | 15 | JavaScript | tedconf/videojs-contrib-hls | dc135fdf3241118c75e47a3096101f6cb735500e | d88a6678138529f1bcf73a680e570227d4002332 | |
refs/heads/master | <file_sep>from __future__ import print_function
try:
import json
except ImportError:
from django.utils import simplejson as json
from django.core.management.base import BaseCommand
try:
from django.db.models import loading
except ImportError:
from django.apps import apps as loading
from django.core.serializers import serialize
from django.conf import settings
from fixture_magic.utils import (
add_to_serialize_list,
reorder_json,
serialize_fully
)
def process_dep(parent, dep, serialize_me, seen):
parts = dep.split('.')
current = parts.pop(0)
leftover = '.'.join(parts)
try:
thing = getattr(parent, current)
except AttributeError:
pass # related object not found
else:
if hasattr(thing, 'all'):
children = thing.all()
else:
children = [thing]
add_to_serialize_list(children, serialize_me, seen)
if leftover:
for child in children:
process_dep(child, leftover, serialize_me, seen)
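# Illustrative sketch (hypothetical relation names): for a dependents entry of
# 'author.books', process_dep(obj, 'author.books', serialize_me, seen) first adds
# obj.author to the serialize list, then recurses with the leftover 'books' so
# that each collected author's .books queryset is serialized as well.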
class Command(BaseCommand):
help = 'Dump multiple pre-defined sets of objects into a JSON fixture.'
def add_arguments(self, parser):
parser.add_argument('dump_name')
parser.add_argument('pk', nargs='*')
parser.add_argument('--natural', default=False, action='store_true', dest='natural',
help='Use natural keys if they are available.')
def handle(self, *args, **options):
serialize_me = []
seen = set()
# Get the primary object
dump_name = options['dump_name']
pks = options['pk']
dump_settings = settings.CUSTOM_DUMPS[dump_name]
app_label, model_name, *manager_method = dump_settings['primary'].split('.')
include_primary = dump_settings.get("include_primary", False)
default_manager = loading.get_model(app_label, model_name).objects
if manager_method:
queryset = getattr(default_manager, manager_method[0])()
else:
queryset = default_manager.all()
if pks:
queryset = queryset.filter(pk__in=pks)
deps = dump_settings.get('dependents', [])
for obj in queryset:
# get the dependent objects and add to serialize list
for dep in deps:
process_dep(obj, dep, serialize_me, seen)
if include_primary or not deps:
add_to_serialize_list([obj], serialize_me, seen)
serialize_fully(serialize_me, seen)
data = serialize(
'json', [o for o in serialize_me if o is not None],
indent=4,
use_natural_foreign_keys=options.get('natural', False),
use_natural_primary_keys=options.get('natural', False),
)
data = reorder_json(
json.loads(data),
dump_settings.get('order', []),
ordering_cond=dump_settings.get('order_cond', {})
)
self.stdout.write(json.dumps(data, indent=4))
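# Example of the settings entry this command reads (app, model and relation
# names below are hypothetical):
#
#   CUSTOM_DUMPS = {
#       'library': {
#           'primary': 'library.Author',        # 'app.Model' or 'app.Model.manager_method'
#           'dependents': ['books.publisher'],  # dotted relations walked by process_dep()
#           'order': ['library.book'],          # optional ordering for reorder_json()
#           'include_primary': True,            # also serialize the primary objects
#       },
#   }
#
# Invoked as: ./manage.py custom_dump library 1 2 --natural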
<file_sep>from __future__ import print_function
from django.core.exceptions import FieldError, ObjectDoesNotExist
from django.core.management.base import CommandError
from django.core.serializers import serialize
from fixture_magic.compat import get_all_related_objects, get_all_generic_foreign_keys
try:
from django.db.models import loading
except ImportError:
from django.apps import apps as loading
import json
from fixture_magic.utils import (add_to_serialize_list, serialize_fully, reorder_json)
from django.contrib.contenttypes.models import ContentType
from django.core.serializers.json import Serializer as JsonSerializer
class CustomJsonSerializer(JsonSerializer):
def handle_fk_field(self, obj, field):
is_content_type = True if hasattr(field, "related_model") and \
field.related_model.__name__ == "ContentType" else False
related_model = hasattr(field, "related_model")
ct = isinstance(field.related_model, ContentType)
if is_content_type or (self.use_natural_foreign_keys and hasattr(field.remote_field.model, 'natural_key')):
related = getattr(obj, field.name)
if related:
value = related.natural_key()
else:
value = None
else:
value = self._value_from_field(obj, field)
self._current[field.name] = value
def dump_object(model,
query,
ids,
order=[],
ignore=[],
related_ignore={},
additional_serialization_objects_fnc=None,
format='json',
kitchensink=True,
follow_fk=True,
natural=False,
natural_foreign=False,
natural_primary=False):
serialize_me = []
seen = set()
error_text = ('Error: %s\n')
try:
# verify input is valid
try:
(app_label, model_name) = model.split('.')
except AttributeError:
raise CommandError("Specify model as `appname.modelname")
if ids and query:
raise CommandError(error_text % 'either use query or id list, not both')
if not (ids or query):
raise CommandError(error_text % 'must pass list of --ids or a json --query')
except IndexError:
raise CommandError(error_text % 'No object_class or filter clause supplied.')
except ValueError as e:
raise CommandError(
error_text %
"object_class must be provided in the following format: app_name.model_name"
)
except AssertionError:
raise CommandError(error_text % 'No filter argument supplied.')
dump_me = loading.get_model(app_label, model_name)
if query:
objs = dump_me.objects.filter(**json.loads(query))
else:
if ids[0] == '*':
objs = dump_me.objects.all()
else:
try:
parsers = int, long, str
except NameError:
parsers = int, str
for parser in parsers:
try:
objs = dump_me.objects.filter(pk__in=map(parser, ids))
except ValueError:
pass
else:
break
if kitchensink:
fields = get_all_related_objects(dump_me)
# if there are no related ignores for this model include the field
# or if this related field is not in the related ignores include the field
related_fields = [rel.get_accessor_name() for rel in fields if
model_name not in related_ignore or
rel.name not in related_ignore[model_name]]
generic_foreign_key_fields = get_all_generic_foreign_keys(dump_me)
for field in generic_foreign_key_fields:
if model_name not in related_ignore or field.name not in related_ignore[model_name]:
related_fields.append(field.name)
for obj in objs:
for rel in related_fields:
try:
if hasattr(getattr(obj, rel), 'all'):
add_to_serialize_list(getattr(obj, rel).all(), serialize_me, seen, ignore=ignore)
else:
add_to_serialize_list([getattr(obj, rel)], serialize_me, seen, ignore=ignore)
# allow user to add additional data apart from standard foreign keys
if additional_serialization_objects_fnc and \
callable(additional_serialization_objects_fnc):
extra_objs = additional_serialization_objects_fnc(getattr(obj, rel))
if extra_objs:
add_to_serialize_list(extra_objs, serialize_me, seen, ignore=ignore)
except FieldError:
pass
except ObjectDoesNotExist:
pass
add_to_serialize_list(objs, serialize_me, seen, prepend=True, ignore=ignore)
if follow_fk:
serialize_fully(serialize_me, seen, ignore, additional_serialization_objects_fnc)
else:
# reverse list to match output of serializez_fully
serialize_me.reverse()
natural_foreign = (natural or
natural_foreign)
natural_primary = (natural or
natural_primary)
if format:
if format == "json":
data = CustomJsonSerializer().serialize(
[o for o in serialize_me if o is not None],
indent=4,
use_natural_foreign_keys=natural_foreign,
use_natural_primary_keys=natural_primary)
else:
data = serialize(format,
[o for o in serialize_me if o is not None],
indent=4,
use_natural_foreign_keys=natural_foreign,
use_natural_primary_keys=natural_primary)
data = reorder_json(
json.loads(data),
order,
)
return json.dumps(data, indent=4)
# return unserialized objs
return [o for o in serialize_me if o is not None]
| 63ee370aa8d1db4728a0ac0daa2a509c51bd32a6 | [
"Python"
] | 2 | Python | pmaccamp/django-fixture-magic | 8bccfdd6d9cd2d741739d21de9a7d59dbf12a82a | bfc07aaf44535f7318e0dcaee793608a726a61ec | |
refs/heads/main | <file_sep>import React from 'react';
import { shallow } from 'enzyme';
import GiftCard from "./GiftCard";
import Button from '@material-ui/core/Button';
import { Link } from 'react-router-dom';
describe("Landing Page Test", () => {
it("should load GiftCard Component", () => {
let wrapper = shallow(<GiftCard />);
expect(wrapper).not.toBe(null);
});
it("should have props for GiftCard", () =>{
let classess = { card: {}, avatar:{}, media:{}}
let gift = {cardName: 'amazon', id: 1, cardPoints: 10, cardImage: 'image.png', cardCount: 11, cardShortDesc: '10% OFF'}
let wrapper = shallow(<GiftCard classes={classess}
userEmail='<EMAIL>'
giftCard={gift} />)
wrapper.instance().setState({expanded: false});
expect(wrapper.instance().state.expanded).toBe(false);
})
// it('', () =>{
// let classess = { card: {}, avatar:{}, media:{}}
// let gift = {cardName: 'amazon', id: 1, cardPoints: 10, cardImage: 'image.png', cardCount: 11, cardShortDesc: '10% OFF'}
// const component = renderer.create(
// <GiftCard classes={classess}
// userEmail='<EMAIL>'
// giftCard={gift} />
// );
// const tree = component.toJSON();
// expect(tree).toMatchSnapshot();
// });
});<file_sep>import axios from "axios";
import thunk from 'redux-thunk'
import configureMockStore from 'redux-mock-store'
import {
fetchCard,
fetchCards,
fetchCardFilter,
adminUpdateCard,
adminAddCard,
updateCardCount
} from "./index";
import { FETCH_CARDS, FETCH_CARD, FETCH_CARD_FILTER, ADMIN_ADD_CARD, UPDATE_CARD_COUNT, ADMIN_UPDATE_CARD } from './types';
jest.mock('axios');
const middlewares = [thunk];
const mockStore = configureMockStore(middlewares);
describe('All gifts actions', function description() {
it ("On fetchCards Action trigger", async () => {
let state = {};
const store = mockStore(state);
axios.get.mockResolvedValueOnce({data:[{ id: 1, name: "iphone" }], headers:{"x-total-count": 1}})
const expectedAction = [
{
type: FETCH_CARDS,
payload: {data:[{ id: 1, name: "iphone" }], totalCount : 1}
}
]
await store.dispatch(fetchCards(1,3));
const getAction = store.getActions();
expect(getAction).toEqual(expectedAction);
})
it ("On fetchCard Action trigger", async () => {
let state = {};
const store = mockStore(state);
axios.get.mockResolvedValueOnce({ id: 1, name: "iphone" })
const expectedAction = [
{
type: FETCH_CARD,
payload: { id: 1, name: "iphone" }
}
]
await store.dispatch(fetchCard(1));
const getAction = store.getActions();
expect(getAction).toEqual(expectedAction);
})
it ("On adminUpdateCard Action trigger", async () => {
let state = {};
const store = mockStore(state);
axios.patch.mockResolvedValueOnce({ id: 1, name: "iphone" })
const expectedAction = [
{
type: ADMIN_UPDATE_CARD,
payload: { id: 1, name: "iphone" }
}
]
await store.dispatch(adminUpdateCard(1,{ id: 1, name: "amazon" }));
const getAction = store.getActions();
expect(getAction).toEqual(expectedAction);
})
it ("On fetchCardFilter Action trigger", async () => {
let state = {};
const store = mockStore(state);
const expectedAction = [
{
type: FETCH_CARD_FILTER,
payload: { id: 1, name: "amazon" }
}
]
await store.dispatch(fetchCardFilter({id: 1, name: "amazon" }));
const getAction = store.getActions();
expect(getAction).toEqual(expectedAction);
})
it ("On updateCardCount Action trigger", async () => {
let state = {};
const store = mockStore(state);
axios.patch.mockResolvedValueOnce({data:{ id: 1, name: "iphone", cardCount:2 }})
const expectedAction = [
{
type: UPDATE_CARD_COUNT,
payload: { id: 1, name: "iphone", cardCount:2 }
}
]
await store.dispatch(updateCardCount(1,1));
const getAction = store.getActions();
expect(getAction).toEqual(expectedAction);
})
it ("On adminAddCard Action trigger", async () => {
let state = {};
const store = mockStore(state);
axios.post.mockResolvedValueOnce({data:{ id: 1, name: "food Panda" }})
const expectedAction = [
{
type: ADMIN_ADD_CARD,
payload: { id: 1, name: "food Panda" }
}
]
await store.dispatch(adminAddCard({ id: 1, name: "food Panda" }));
const getAction = store.getActions();
expect(getAction).toEqual(expectedAction);
})
});<file_sep>import { Random_rgba } from './Random_rgba';
describe('Random_rgba component', () => {
it('Random_rgba', () => {
let comp = Random_rgba()
expect(comp).not.toBe(null);
})
}); <file_sep>import loginReducer from "./loginReducer";
import { LOGIN, LOGOUT } from "../actions/types";
import { logout } from "../actions";
describe ("Login Reducer Tests", () => {
let user;
beforeEach(() => {
user=null
});
it ("should be empty list", () => {
let state = {};
let login = {...state,
loginStatus: true,
detailsObject: {}
}
let logout = {...state,
loginStatus: false,
detailsObject: {}
}
const initial = user ? login : logout;
expect(loginReducer (initial, {})).
toEqual({"detailsObject": {}, "loginStatus": false})
});
it("when action type is LOGIN", () =>{
let state = {};
expect(loginReducer ({...state,
loginStatus: true,
detailsObject: {}
}, {type:LOGIN, payload: {id:1, name:'<NAME>'}}))
.toEqual({"detailsObject": {"id": 1, "name": "<NAME>"}, "loginStatus": true})
});
it("when action type is LOGOUT", () =>{
let state = {}
expect(loginReducer ({...state,
loginStatus: false,
detailsObject: {}
}, {type:LOGOUT, payload: {id:1, name:'<NAME>'}}))
.toEqual({"detailsObject": {}, "loginStatus": false})
})
it ("should be same state", () => {
let state = [];
expect(loginReducer(state, {type:'__UNKNOWN'})).toStrictEqual({})
});
})<file_sep>import React from 'react';
import { shallow, mount } from 'enzyme';
import GiftsList from './GiftsList';
import { jssPreset } from '@material-ui/core';
describe("GiftsList Section Test", () => {
it('should load component', () =>{
let props = {
giftCards:[],
totalCount: 0,
fetchMorecards: jest.fn(),
userDetails:{
email: ''
},
classes:{
root:{}
}
}
let wrapper = shallow(<GiftsList {...props}/>);
expect(wrapper).toBeTruthy();
})
// it('should call componentDidMount', () =>{
// let props = {
// giftCards:[],
// totalCount: 0,
// fetchMorecards: jest.fn(),
// userDetails:{
// email: ''
// },
// classes:{
// root:{}
// }
// }
// let wrapper = shallow(<GiftsList {...props}/>);
// const c = wrapper.instance();
// const check = jest.spyOn(c,'componentDidMount');
// expect(check).toHaveBeenCalled();
// })
});<file_sep>// import React from 'react';
// import PropTypes from 'prop-types';
// import { withStyles } from '@material-ui/core/styles';
// import Paper from '@material-ui/core/Paper';
// import { Grid, AutoSizer, Table, Column, InfiniteLoader } from 'react-virtualized';
// import {DateFormatter} from '../../common/components/DateFormatter';
// import 'react-virtualized/styles.css';
// const styles = theme => ({
// root: {
// width: '100%',
// marginTop: theme.spacing.unit * 3,
// //overflowX: 'auto',
// },
// table: {
// minWidth: 700,
// },
// tablecell: {
// border: '1px solid black',
// lineHeight: '30px',
// textAlign: 'center',
// padding: '0 4px',
// }
// });
// class GiftsSend extends React.PureComponent {
// constructor(props) {
// super(props);
// this.state = {
// prevStaterows: [],
// rows: []
// };
// // this.generateData();
// }
// componentDidMount() {
// debugger;
// if(this.props.data)
// this.generateData([]);
// }
// componentDidUpdate(prevProps) {
// debugger;
// // Typical usage (don't forget to compare props):
// if (JSON.stringify(this.props.data) !== JSON.stringify(prevProps.data)) {
// this.generateData(prevProps.data);
// }
// }
// componentWillReceiveProps (nextProps) {
// if (JSON.stringify(this.props.data) !== JSON.stringify(nextProps.data)) {
// this.generateData(this.props.data);
// }
// }
// generateData = (arr) => {
// debugger;
// let rowes = arr;
// // this.setState(
// // prevState => {
// // return {
// // prevStaterows: prevState.rows
// // };
// // },() => {rowes = [...this.state.prevStaterows]}
// // );
// // rowes = [...this.state.prevStaterows];
// this.props.data.forEach((row,i) => {
// let a = {
// "cardName":row.cardName,
// "points": row.cardPoints,
// "receiveTo":row.senderEmail,
// "issueDate":DateFormatter(row.cardIssueDate),
// "expiryDate":DateFormatter(row.cardExpiryDate)
// }
// // rowes[i] = [];
// // let b = Object.values(a);
// // for (let j = 0; j < Object.entries(a).length; j++) {
// // rowes[i].push(b[j]);
// // }
// rowes.push(a);
// });
// this.setState({rows:rowes});
// }
// loadMore = () => {
// debugger;
// this.props.fetchSentCards();
// }
// // cellRenderer = ({ columnIndex, key, rowIndex, style }) => {
// // return (
// // <div key={key} style={style}>
// // <div className={this.props.classes.tablecell}>
// // {this.state.rows[rowIndex][columnIndex]}
// // </div>
// // </div>
// // );
// // };
// render() {
// const {classes, data, totalCount} = this.props;
// return (
// <Paper className={classes.root}>
// {/* <Table className={classes.table}>
// <TableHead>
// <TableRow>
// <TableCell>CARD NAME</TableCell>
// <TableCell >POINTS</TableCell>
// <TableCell >SENT TO</TableCell>
// <TableCell >ISSUE DATE</TableCell>
// <TableCell >EXPIRY DATE</TableCell>
// </TableRow>
// </TableHead>
// <TableBody>
// {data.map(row => (
// <TableRow key={row.id}>
// <TableCell>{row.cardName}</TableCell>
// <TableCell >{row.cardPoints}</TableCell>
// <TableCell >{row.receiverEmail}</TableCell>
// <TableCell>{DateFormatter(row.cardIssueDate)}</TableCell>
// <TableCell>{DateFormatter(row.cardExpiryDate)}</TableCell>
// </TableRow>
// ))}
// </TableBody>
// </Table> */}
// {/* <div style={{width:"100%", height:"74vh"}}>
// <AutoSizer>
// {({width, height}) =>(
// // <List width={width} height={height} rowHeight={50} rowCount={data.length}
// // rowRenderer={({key, index, style, parent}) =>{
// // const row = data[index];
// // return (<div key={key} style={style}>
// // <tr> <div>{row.cardName}</div>
// // <div>{row.cardPoints}</div>
// // <div>{row.receiverEmail}</div>
// // <div>{DateFormatter(row.cardIssueDate)}</div>
// // <div>{DateFormatter(row.cardExpiryDate)}</div>
// // </tr>
// // </div>);
// // }} />
// // <Grid
// // cellRenderer={this.cellRenderer}
// // overscanRowCount={2}
// // columnCount={5}
// // columnWidth={200}
// // height={height}
// // rowCount={data.length}
// // rowHeight={30}
// // width={width}
// // />
// <Table
// width={width}
// height={height}
// headerHeight={40}
// rowHeight={40}
// rowCount={this.state.rows.length}
// rowGetter={({index}) => this.state.rows[index]}>
// <Column label="Card Name" dataKey="cardName" width={200} />
// <Column width={200} label="Card Points" dataKey="points" />
// <Column label="Receiver Email" dataKey="receiveTo" width={400} />
// <Column width={300} label="Card Issue Date" dataKey="issueDate" />
// <Column width={300} label="Card Expiry Date" dataKey="expiryDate" />
// </Table>
// )}
// </AutoSizer>
// </div> */}
// <div style={{width:"100%", height:"74vh"}}>
// <InfiniteLoader
// isRowLoaded={({index}) => !!this.state.rows[index]}
// loadMoreRows={this.loadMore}
// rowCount={totalCount}
// >
// {({onRowsRendered, registerChild}) => (
// <AutoSizer>
// {({ width, height}) => (
// <Table
// ref={registerChild}
// onRowsRendered={onRowsRendered}
// rowClassName='table-row'
// headerHeight={40}
// width={width}
// height={height}
// rowHeight={40}
// rowCount={this.state.rows.length}
// rowGetter={({ index }) => this.state.rows[index]}
// >
// <Column label="Card Name" dataKey="cardName" width={200} />
// <Column width={200} label="Card Points" dataKey="points" />
// <Column label="Receiver Email" dataKey="receiveTo" width={400} />
// <Column width={300} label="Card Issue Date" dataKey="issueDate" />
// <Column width={300} label="Card Expiry Date" dataKey="expiryDate" />
// </Table>
// )}
// </AutoSizer>
// )}
// </InfiniteLoader>
// </div>
// </Paper>
// );
// }
// }
// GiftsSend.propTypes = {
// classes: PropTypes.object.isRequired,
// };
// export default withStyles(styles)(GiftsSend);
import React, { useEffect, useState, useRef } from 'react';
import PropTypes from 'prop-types';
import { withStyles } from '@material-ui/core/styles';
import Paper from '@material-ui/core/Paper';
import { AutoSizer, Table, Column, InfiniteLoader } from 'react-virtualized';
import {DateFormatter} from '../../common/components/DateFormatter';
import 'react-virtualized/styles.css';
const styles = theme => ({
root: {
width: '100%',
marginTop: theme.spacing.unit * 3,
//overflowX: 'auto',
},
table: {
minWidth: 700,
},
tablecell: {
border: '1px solid black',
lineHeight: '30px',
textAlign: 'center',
padding: '0 4px',
}
});
function GiftsSend (props) {
const { classes } = props;
const [rows, setRows] = useState([]);
const prevCount = useRef();
useEffect(() => {
prevCount.current = rows;
});
const loadMore = () =>{
props.fetchSentCards();
// resolve the promise after data where fetched
// this.promiseResolve();
}
useEffect(() =>{
const generateData = () => {
const rowes = [...prevCount.current];
props.data.forEach((row,i) => {
let a = {
"cardName":row.cardName,
"points": row.cardPoints,
"receiveTo":row.senderEmail,
"issueDate":DateFormatter(row.cardIssueDate),
"expiryDate":DateFormatter(row.cardExpiryDate)
}
// rowes[i] = [];
// let b = Object.values(a);
// for (let j = 0; j < Object.entries(a).length; j++) {
// rowes[i].push(b[j]);
// }
rowes.push(a);
});
setRows(rowes);
}
generateData();
},[props.data]);
// generateData =() => {
// debugger;
// let rowes = [];
// // this.setState(
// // prevState => {
// // return {
// // prevStaterows: prevState.rows
// // };
// // },() => {rowes = [...this.state.prevStaterows]}
// // );
// rowes = [...this.state.prevStaterows]
// this.props.data.forEach((row,i) => {
// let a = {
// "cardName":row.cardName,
// "points": row.cardPoints,
// "receiveTo":row.senderEmail,
// "issueDate":DateFormatter(row.cardIssueDate),
// "expiryDate":DateFormatter(row.cardExpiryDate)
// }
// // rowes[i] = [];
// // let b = Object.values(a);
// // for (let j = 0; j < Object.entries(a).length; j++) {
// // rowes[i].push(b[j]);
// // }
// rowes.push(a);
// });
// this.setState({rows:rowes});
// }
// loadMore = () => {
// debugger;
// this.props.fetchSentCards();
// }
// cellRenderer = ({ columnIndex, key, rowIndex, style }) => {
// return (
// <div key={key} style={style}>
// <div className={this.props.classes.tablecell}>
// {this.state.rows[rowIndex][columnIndex]}
// </div>
// </div>
// );
// };
// render() {
// const {classes, data, totalCount} = this.props;
return (
<Paper className={classes.root}>
{/* <Table className={classes.table}>
<TableHead>
<TableRow>
<TableCell>CARD NAME</TableCell>
<TableCell >POINTS</TableCell>
<TableCell >SENT TO</TableCell>
<TableCell >ISSUE DATE</TableCell>
<TableCell >EXPIRY DATE</TableCell>
</TableRow>
</TableHead>
<TableBody>
{data.map(row => (
<TableRow key={row.id}>
<TableCell>{row.cardName}</TableCell>
<TableCell >{row.cardPoints}</TableCell>
<TableCell >{row.receiverEmail}</TableCell>
<TableCell>{DateFormatter(row.cardIssueDate)}</TableCell>
<TableCell>{DateFormatter(row.cardExpiryDate)}</TableCell>
</TableRow>
))}
</TableBody>
</Table> */}
{/* <div style={{width:"100%", height:"74vh"}}>
<AutoSizer>
{({width, height}) =>(
// <List width={width} height={height} rowHeight={50} rowCount={data.length}
// rowRenderer={({key, index, style, parent}) =>{
// const row = data[index];
// return (<div key={key} style={style}>
// <tr> <div>{row.cardName}</div>
// <div>{row.cardPoints}</div>
// <div>{row.receiverEmail}</div>
// <div>{DateFormatter(row.cardIssueDate)}</div>
// <div>{DateFormatter(row.cardExpiryDate)}</div>
// </tr>
// </div>);
// }} />
// <Grid
// cellRenderer={this.cellRenderer}
// overscanRowCount={2}
// columnCount={5}
// columnWidth={200}
// height={height}
// rowCount={data.length}
// rowHeight={30}
// width={width}
// />
<Table
width={width}
height={height}
headerHeight={40}
rowHeight={40}
rowCount={this.state.rows.length}
rowGetter={({index}) => this.state.rows[index]}>
<Column label="Card Name" dataKey="cardName" width={200} />
<Column width={200} label="Card Points" dataKey="points" />
<Column label="Receiver Email" dataKey="receiveTo" width={400} />
<Column width={300} label="Card Issue Date" dataKey="issueDate" />
<Column width={300} label="Card Expiry Date" dataKey="expiryDate" />
</Table>
)}
</AutoSizer>
</div> */}
<div style={{width:"100%", height:"74vh"}}>
<InfiniteLoader
isRowLoaded={({index}) => !!rows[index]}
loadMoreRows={loadMore}
rowCount={props.totalCount}
>
{({onRowsRendered, registerChild}) => (
<AutoSizer>
{({ width, height}) => (
<Table
ref={registerChild}
onRowsRendered={onRowsRendered}
rowClassName='table-row'
headerHeight={40}
width={width}
height={height}
rowHeight={40}
rowCount={rows.length}
rowGetter={({ index }) => rows[index]}
>
<Column label="Card Name" dataKey="cardName" width={200} />
<Column width={200} label="Card Points" dataKey="points" />
<Column label="Receiver Email" dataKey="receiveTo" width={400} />
<Column width={300} label="Card Issue Date" dataKey="issueDate" />
<Column width={300} label="Card Expiry Date" dataKey="expiryDate" />
</Table>
)}
</AutoSizer>
)}
</InfiniteLoader>
</div>
</Paper>
);
//}
}
GiftsSend.propTypes = {
classes: PropTypes.object.isRequired,
};
export default withStyles(styles)(GiftsSend);
<file_sep>import React from 'react';
import { shallow, mount } from 'enzyme';
import GiftShow from './GiftShow';
describe("GiftShow Section Test", () => {
it('should load component', () =>{
let props = {
data:{
cardImage:'',
cardName: '',
cardPoints: '',
cardCount: '',
cardExpiryDate: '',
cardComments:[
{
first_name: '',
last_name: '',
commented_on: '',
comment: '',
rating:''
}
],
cardVendor: '',
cardLongDesc: ''
}
}
let wrapper = shallow(<GiftShow {...props}/>);
expect(wrapper).toBeTruthy();
})
});<file_sep>import React, { useEffect, useState, useRef } from 'react';
import PropTypes from 'prop-types';
import { withStyles } from '@material-ui/core/styles';
import { Table, Column, AutoSizer, InfiniteLoader } from 'react-virtualized';
import Paper from '@material-ui/core/Paper';
//import { Button } from '@material-ui/core';
import {DateFormatter} from '../../common/components/DateFormatter';
import 'react-virtualized/styles.css';
import { Button } from '@material-ui/core';
const styles = theme => ({
root: {
// minHeight: '100vh',
width: '100%',
marginTop: theme.spacing.unit * 3,
//overflowX: 'auto',
},
table: {
minWidth: 700,
},
tablecell: {
border: '1px solid black',
lineHeight: '30px',
textAlign: 'center',
padding: '0 4px',
}
});
function GiftsReceived(props) {
const { classes } = props;
const [rows, setRows] = useState([]);
const prevCount = useRef();
useEffect(() => {
prevCount.current = rows;
});
const loadMore = () =>{
props.fetchReceivedCards();
// resolve the promise after data where fetched
// this.promiseResolve();
}
useEffect(() =>{
const generateData = () => {
const rowes = [...prevCount.current];
props.data.forEach((row,i) => {
let a = {
"cardName":row.cardName,
"points": row.cardPoints,
"sentTo":row.senderEmail,
"shortDesc": row.cardShortDesc,
"issueDate":DateFormatter(row.cardIssueDate),
"expiryDate":DateFormatter(row.cardExpiryDate),
"redeem" : row.isRedeemed ? 'Redeemed': 'notRedeemed'
}
// rowes[i] = [];
// let b = Object.values(a);
// for (let j = 0; j < Object.entries(a).length; j++) {
// rowes[i].push(b[j]);
// }
rowes.push(a);
});
setRows(rowes);
}
generateData();
},[props.data]);
// const cellRenderer = ({ columnIndex, key, rowIndex, style }) => {
// return (
// <div key={key} style={style}>
// <div className={props.classes.tablecell}>
// {rows[rowIndex][columnIndex] === 'notRedeemed' ?
// <Button variant="contained" color="primary" onClick={handleRedeem(rowIndex)}>
// Redeem
// </Button> : rows[rowIndex][columnIndex]}
// </div>
// </div>
// );
// };
// const handleRedeem = (index) =>{
// props.redeemCard(rows[index])
// }
return (
<Paper className={classes.root}>
{/* <div style={{width:"100%", height:"74vh"}}>
<AutoSizer>
{({width, height}) =>(
// <List width={width} height={height} rowHeight={200} rowCount={data.length}
// overscanRowCount={2}
// rowRenderer={({key, index, style, parent}) =>{
// const row = data[index];
// return (<div key={key} style={style}>
// <tr> <div>{row.cardName}</div>
// <div>{row.cardPoints}</div>
// <div>{row.senderEmail}</div>
// <div>{row.cardShortDesc}</div>
// <div>{DateFormatter(row.cardIssueDate)}</div>
// <div>{DateFormatter(row.cardExpiryDate)}</div>
// <div>{row.isRedeemed ? 'Redeemed' : <Button variant="contained" color="primary" onClick={()=>props.redeemCard(row)}>Redeem</Button>}</div>
// </tr>
// </div>);
// }
// } />
// <Grid
// cellRenderer={cellRenderer}
// overscanRowCount={2}
// columnCount={7}
// columnWidth={200}
// height={height}
// rowCount={data.length}
// rowHeight={30}
// width={width}
// />
<Table
width={width}
height={height}
headerHeight={40}
rowHeight={40}
rowCount={rows.length}
overscanRowCount={10}
rowGetter={({index}) => rows[index]}>
<Column label="Card Name" dataKey="cardName" width={200} />
<Column width={200} label="Card Points" dataKey="points" />
<Column label="Sender Email" dataKey="sentTo" width={400} />
<Column label="Short Desc" dataKey="shortDesc" width={200} />
<Column width={300} label="Card Issue Date" dataKey="issueDate" />
<Column width={300} label="Card Expiry Date" dataKey="expiryDate" />
<Column label="Status" dataKey="redeem" width={200} />
</Table>
)}
</AutoSizer>
</div> */}
<div style={{width:"100%", height:"74vh"}}>
<InfiniteLoader
isRowLoaded={({ index}) => !!rows[index]}
loadMoreRows={loadMore}
rowCount={props.totalCount}
>
{({onRowsRendered, registerChild}) => (
<AutoSizer>
{({ width, height}) => (
<Table
ref={registerChild}
onRowsRendered={onRowsRendered}
rowClassName='table-row'
headerHeight={40}
width={width}
height={height}
rowHeight={40}
rowCount={rows.length}
rowGetter={({ index }) => rows[index]}
>
<Column label="Card Name" dataKey="cardName" width={200} />
<Column width={200} label="Card Points" dataKey="points" />
<Column label="Sender Email" dataKey="sentTo" width={400} />
<Column label="Short Desc" dataKey="shortDesc" width={200} />
<Column width={300} label="Card Issue Date" dataKey="issueDate" />
<Column width={300} label="Card Expiry Date" dataKey="expiryDate" />
<Column label="Status" dataKey="redeem" width={200}
cellRenderer={
({rowData}) => rowData.redeem === 'Redeemed' ? 'Redeemed' :
<Button color='primary' variant="contained" onClick={() =>{console.log(rowData)}}>
Redeem
</Button>
} />
</Table>
)}
</AutoSizer>
)}
</InfiniteLoader>
</div>
</Paper>
);
}
GiftsReceived.propTypes = {
classes: PropTypes.object.isRequired,
};
export default withStyles(styles)(GiftsReceived);<file_sep>import giftsReducer from "./giftsReducer";
import { FETCH_CARDS, FETCH_CARD, FETCH_CARD_FILTER, UPDATE_CARD_COUNT, ADMIN_ADD_CARD, ADMIN_UPDATE_CARD } from "../actions/types";
describe ("Gifts Reducer Tests", () => {
it ("should be empty giftlist", () => {
let state = {
giftCards: [],
giftCardsFiltered: [],
giftCard: {}
};
expect(giftsReducer ({
giftCards: [],
giftCardsFiltered: [],
giftCard: {}
}, {})).toEqual(state)
});
it("when action type is FETCH_CARDS", () =>{
let state = {
giftCards: [],
giftCardsFiltered: [],
giftCard: {},
};
expect(giftsReducer(state,
{type:FETCH_CARDS, payload: {data:[{id:1, name:'new card1'},{id:2, name:'new card2'}], totalCount:2}}))
.toEqual({
giftCards: [{id:1, name:'new card1'}, {id:2, name:'new card2'}],
giftCardsFiltered: [{id:1, name:'new card1'}, {id:2, name:'new card2'}],
giftCard: {},
totalCount: 2
})
});
it("when action type is FETCH_CARD_FILTER", () =>{
let state = {
giftCards: [],
giftCardsFiltered: [],
giftCard: {},
};
expect(giftsReducer(state, {type:FETCH_CARD_FILTER, payload: [{id:1, name:'new card'}]}))
.toEqual({
giftCards: [],
giftCardsFiltered: [{id:1, name:'new card'}],
giftCard: {},
})
});
it("when action type is UPDATE_CARD_COUNT", () =>{
let state = {
giftCards: [],
giftCardsFiltered: [],
giftCard: {},
};
expect(giftsReducer(state, {type:UPDATE_CARD_COUNT, payload: {id:1, name:'new card'}}))
.toEqual({
giftCards: [],
giftCardsFiltered: [],
giftCard: {id:1, name:'new card'},
})
});
it("when action type is FETCH_CARD", () =>{
let state = {
giftCards: [],
giftCardsFiltered: [],
giftCard: {},
};
expect(giftsReducer(state, {type:FETCH_CARD, payload: {data:{id:1, name:'new card'}}}))
.toEqual({
giftCards: [],
giftCardsFiltered: [],
giftCard: {id:1, name:'new card'},
})
});
it("when action type is ADMIN_ADD_CARD", () =>{
let state = {
giftCards: [],
giftCardsFiltered: [],
giftCard: {},
};
expect(giftsReducer(state, {type:ADMIN_ADD_CARD}))
.toEqual({
giftCards: [],
giftCardsFiltered: [],
giftCard: {},
})
});
it("when action type is ADMIN_UPDATE_CARD", () =>{
let state = {
giftCards: [{id:1, name:'new card1'}],
giftCardsFiltered: [],
giftCard: {},
};
expect(giftsReducer(state, {type:ADMIN_UPDATE_CARD,payload: {data:{id:1, name:'new card'}}}))
.toEqual({
giftCards: [{id:1, name:'new card'}],
giftCardsFiltered: [],
giftCard: {},
})
});
it("when action type is ADMIN_UPDATE_CARD with not having id present", () =>{
let state = {
giftCards: [{id:1, name:'new card1'}],
giftCardsFiltered: [],
giftCard: {},
};
expect(giftsReducer(state, {type:ADMIN_UPDATE_CARD,payload: {data:{id:2, name:'new card'}}}))
.toEqual({
giftCards: [{id:1, name:'new card1'}],
giftCardsFiltered: [],
giftCard: {},
})
});
it ("should be same state", () => {
let state = [];
expect(giftsReducer(state, {type:'__UNKNOWN'})).toStrictEqual({})
});
})<file_sep>import { compareCountAsc, compareCountDesc,
comparePointsAsc, comparePointsDesc,
compareValidityAsc, compareValidityDesc } from './CompareForSort';
describe('Calc component', () => {
it('compareCountAsc', () => {
let a = {cardCount: 10};
let b = {cardCount: 20};
let comp = compareCountAsc(a, b)
expect(comp).toBe(-1);
let c = {cardCount: 20};
let d = {cardCount: 10};
let comp1 = compareCountAsc(c, d)
expect(comp1).toBe(1);
})
it('comparePointsAsc', () => {
let a = {cardPoints: 10};
let b = {cardPoints: 20};
let comp = comparePointsAsc(a, b)
expect(comp).toBe(-1);
let c = {cardPoints: 20};
let d = {cardPoints: 10};
let comp1 = comparePointsAsc(c, d)
expect(comp1).toBe(1);
})
it('comparePointsDesc', () => {
let a = {cardPoints: 10};
let b = {cardPoints: 20};
let comp = comparePointsDesc(a, b)
expect(comp).toBe(1);
let c = {cardPoints: 20};
let d = {cardPoints: 10};
let comp1 = comparePointsDesc(c, d)
expect(comp1).toBe(-1);
})
it('compareCountDesc', () => {
let a = {cardCount: 10};
let b = {cardCount : 20};
let comp = compareCountDesc(a, b)
expect(comp).toBe(1);
let c = {cardCount: 20};
let d = {cardCount : 10};
let comp1 = compareCountDesc(c, d)
expect(comp1).toBe(-1);
})
it('compareValidityAsc', () => {
let a = {cardExpiryDate: new Date()};
        let b = {cardExpiryDate: new Date()+1}; // note: "new Date() + 1" coerces the Date to a string
let comp = compareValidityAsc(a, b)
expect(comp).toBe(0);
})
it('compareValidityDesc', () => {
let a = {cardExpiryDate: new Date()};
let b = {cardExpiryDate: new Date() + 1};
let comp = compareValidityDesc(a, b)
expect(comp).toBe(0);
})
});<file_sep># React-training-Project-2021<file_sep>import React from 'react';
import { shallow, mount } from 'enzyme';
import Profile from './profile';
describe('Profile component', () => {
it('should render snapshot', () => {
let props = {
detailsObject:{
email: '',
first_name: '',
last_name: '',
socialProfileLink: '',
picture: '',
balance_points: 100
}
}
let wrapper = shallow(<Profile {...props}/>);
expect(wrapper).toBeTruthy();
})
it('should not have socialProfileLink', () => {
let props = {
detailsObject:{
email: '',
first_name: '',
last_name: '',
picture: '',
balance_points: 100
}
}
let wrapper = shallow(<Profile {...props}/>);
expect(wrapper.find('label').at(2).text()).toBe('Balance Points');
})
});<file_sep>import React from 'react';
import { shallow, mount } from 'enzyme';
import GiftShowContainer from './GiftShowContainer';
import configureMockStore from 'redux-mock-store'
import fetchMock from 'fetch-mock';
import thunk from 'redux-thunk'
import * as actions from '../state/actions'
import {Provider} from 'react-redux';
const middlewares = [thunk]
const mockStore = configureMockStore(middlewares)
describe("GiftShowContainer Section Test", () => {
let wrapper;
beforeEach(() => {
        wrapper = shallow(<Provider store={store1}><GiftShowContainer /></Provider>); // pass a store instance, not the mockStore factory
});
afterEach(() => {
fetchMock.restore();
})
const state = {
gifts:{
giftCard: {}
},
user: {
UserDetails: {}
},
login: {
detailsObject: {},
loginStatus: {}
}
}
const store1 = mockStore(state);
it("should load GiftShowContainer Component", () => {
wrapper = shallow(<Provider store={store1}><GiftShowContainer /></Provider>);
expect(wrapper).not.toBe(null);
});
// it("should call componentDidMount", () =>{
// wrapper = mount(<Provider store={store1}><GiftShowContainer /></Provider>);
// expect(wrapper..componentDidMount()).toHaveBeenCalled();
// })
});<file_sep>import React from 'react';
import InputTypeComponent from './inputTypeComponent';
import { mount, shallow } from 'enzyme';
import {
FormControl,
Input,
InputAdornment,
InputLabel,
FormHelperText,
IconButton
} from "@material-ui/core";
describe('InputTypeComponent component', () => {
it('should render snapshot', () => {
let props = {
fieldFullWidth: '100%',
inputError: '',
style: {},
inputDisabled: false,
inputId: 'id',
inputType: 'text',
inputValue: '',
inputPlaceholder: 'Please enter',
endAdornment: true,
endAdornmentIcon: '',
startAdornment: true,
startAdornmentText: '',
inputHelperText: 'check it',
handleEndAdornmentButtonClick: jest.fn()
}
let wrapper = shallow(<InputTypeComponent {...props}/>)
expect(wrapper.find(Input)).toBeTruthy();
})
    it('should handle input change without adornments', () => {
let props = {
fieldFullWidth: '100%',
inputError: '',
style: {},
inputDisabled: false,
inputId: 'id',
inputType: 'text',
inputValue: '',
inputPlaceholder: 'Please enter',
endAdornment: false,
endAdornmentIcon: '',
startAdornment: false,
startAdornmentText: '',
inputHelperText: 'check it',
handleEndAdornmentButtonClick: jest.fn(),
handleInputChange: jest.fn()
}
let wrapper = shallow(<InputTypeComponent {...props}/>)
wrapper.find(Input).simulate('change');
        expect(wrapper.find(Input).props().inputId).toBeUndefined();
})
});<file_sep>import axios from "axios";
import thunk from 'redux-thunk'
import configureMockStore from 'redux-mock-store'
import {
createUser, login, logout
} from "./index";
import { LOGIN, LOGOUT, CREATE_USER } from "./types";
jest.mock('axios');
const middlewares = [thunk];
const mockStore = configureMockStore(middlewares);
describe('All Header actions', function description() {
it ("On login Action trigger", async () => {
let state = {};
const store = mockStore(state);
axios.get.mockResolvedValueOnce({data:[{ id: 1, name: "Prajakta" }]})
const expectedAction = [
{
type: LOGIN,
payload: { id: 1, name: "Prajakta" }
}
]
let user = {
"id": '',
"email": '',
"first_name": '',
"last_name": '',
"picture":''
}
await store.dispatch(createUser(user));
const getAction = store.getActions();
expect(getAction).toEqual(expectedAction);
})
    it ("On createUser Action trigger when the user does not exist", async () => {
let state = {};
const store = mockStore(state);
axios.get.mockResolvedValueOnce({data:[]});
axios.post.mockResolvedValueOnce({data:[{ id: 1, name: "Prajakta" }]})
const expectedAction = [
{
type: CREATE_USER,
payload: [{ id: 1, name: "Prajakta" }]
}
]
let user = {
"id": '',
"email": '',
"first_name": '',
"last_name": '',
"picture":''
}
await store.dispatch(createUser(user));
const getAction = store.getActions();
expect(getAction).toEqual(expectedAction);
let loggOUT = logout();
expect(loggOUT).not.toBe(null);
})
});<file_sep>import React from 'react';
import { shallow, mount } from 'enzyme';
import GiftsSend from './giftsSend';
describe('GiftsSend component', () => {
it('should render component', () =>{
let props = {
classes:{
root:{
margin:'0px'
}
},
fetchSentCards: jest.fn(),
data:[
{
cardName:'',
cardPoints:'',
senderEmail:'',
cardIssueDate:'',
cardExpiryDate: ''
}
],
totalCount:1
}
let wrapper = shallow(<GiftsSend {...props}/>);
expect(wrapper).not.toBe(null);
});
});<file_sep>import usersReducer from "./usersReducer";
import { RECEIVED_CARDS, SENT_CARDS, USER_DETAILS, REDEEM_CARD, UPDATE_BALANCE, UPDATE_TRANSACT } from './../actions/types';
describe ("UsersReducer Reducer Tests", () => {
it ("should be empty state", () => {
let state = {
cards: [],
UserDetails: []
};
expect(usersReducer ({
cards: [],
UserDetails: []
}, {})).toEqual(state)
});
it("when action type is RECEIVED_CARDS", () =>{
let state = {
cards: [],
UserDetails: []
};
expect(usersReducer (state, {type:RECEIVED_CARDS,
payload:{data:[{id:1, name:'new card1'},{id:2, name:'new card2'}], totalCount:2}}))
.toEqual({cards: [{id:1, name:'new card1'},{id:2, name:'new card2'}],
totalCount:2, UserDetails: []})
});
it("when action type is SENT_CARDS", () =>{
let state = {
cards: [],
UserDetails: []
};
expect(usersReducer (state, {type:SENT_CARDS,
payload:{data:[{id:1, name:'new card1'},{id:2, name:'new card2'}], totalCount:2}}))
.toEqual({cards: [{id:1, name:'new card1'},{id:2, name:'new card2'}],
totalCount:2, UserDetails: []})
});
it("when action type is USER_DETAILS", () =>{
let state = {
cards: [],
UserDetails: []
};
expect(usersReducer (state, {type:USER_DETAILS,
payload:{userId:1, userName:'new user1'}}))
.toEqual({cards: [], UserDetails: {userId:1, userName:'new user1'}})
});
it("when action type is UPDATE_BALANCE", () =>{
let state = {
cards: [],
UserDetails: []
};
expect(usersReducer (state, {type:UPDATE_BALANCE,
payload:{userId:1, userName:'new user1', balance:500}}))
.toEqual({cards: [], UserDetails: {userId:1, userName:'new user1',balance: 500}})
});
it("when action type is UPDATE_TRANSACT", () =>{
let state = {
cards: [],
UserDetails: []
};
expect(usersReducer (state, {type:UPDATE_TRANSACT,
payload:[{id:1, name:'new card1'},{id:2, name:'new card2'}]}))
.toEqual({cards: [{id:1, name:'new card1'},{id:2, name:'new card2'}], UserDetails: []})
});
it("when action type is REDEEM_CARD", () =>{
let state = {
cards: [{id:1, name:'new card1'},{id:2, name:'new card2'}],
UserDetails: []
};
expect(usersReducer (state, {type:REDEEM_CARD, payload: {id:1, name:'new card1'}}))
.toEqual({cards: [{id:1, name:'new card1', isRedeemed: true},{id:2, name:'new card2'}], UserDetails: []})
});
it("when action type is REDEEM_CARD when card is not present", () =>{
let state = {
cards: [{id:1, name:'new card1'},{id:2, name:'new card2'}],
UserDetails: []
};
expect(usersReducer (state, {type:REDEEM_CARD, payload: {id:3, name:'new card3'}}))
.toEqual({cards: [{id:1, name:'new card1'},{id:2, name:'new card2'}], UserDetails: []})
});
it ("should be same state", () => {
let state = [];
expect(usersReducer(state, {type:'__UNKNOWN'})).toStrictEqual({})
});
})<file_sep>export const apiURL = 'http://localhost:5000'
export const adminEmail = [
"<EMAIL>",
]<file_sep>import React from 'react';
import { shallow, mount } from 'enzyme';
import configureMockStore from 'redux-mock-store'
import thunk from 'redux-thunk'
import {Provider} from 'react-redux';
import GiftsListContainer from './GiftsListContainer';
import fetchMock from 'fetch-mock';
const middlewares = [thunk]
const mockStore = configureMockStore(middlewares)
describe("GiftsListContainer Section Test", () => {
let wrapper;
beforeEach(() => {
        wrapper = shallow(<Provider store={store1}><GiftsListContainer /></Provider>); // pass a store instance, not the mockStore factory
});
afterEach(() => {
fetchMock.restore();
})
const state = {
giftCards:{
gifts:{
giftCards: []
}
},
totalCount:{
gifts:{
totalCount:0
}
},
giftCardsFiltered: {
gifts:{
giftCardsFiltered:[]
}
},
userDetails: {
login: {
detailsObject: {}
}
}
}
const store1 = mockStore(state);
it("should load GiftsListContainer Component", () => {
let wrapper = shallow(<Provider store={store1}><GiftsListContainer /></Provider>);
expect(wrapper).not.toBe(null);
});
// it("should load ComponentDidmount", () => {
// const spy = jest.spyOn(GiftsListContainer.WrappedComponent.prototype, 'componentDidMount');
// let props = {
// fetchCards: jest.fn()
// }
// let wrap = shallow(<Provider store={store1}>
// <GiftsListContainer store={store1} {...props}/>
// </Provider>);
// expect(spy).toHaveBeenCalled();
// wrap.unmount();
// });
});
<file_sep>import React from 'react';
import { shallow, mount } from 'enzyme';
import Header from "./Header";
import configureMockStore from 'redux-mock-store'
import thunk from 'redux-thunk'
import * as actions from '../state/actions'
import {Provider} from 'react-redux';
import Button from '@material-ui/core/Button';
import AppBar from "@material-ui/core/AppBar";
import Toolbar from "@material-ui/core/Toolbar";
import Typography from "@material-ui/core/Typography";
import fetchMock from 'fetch-mock';
const middlewares = [thunk]
const mockStore = configureMockStore(middlewares)
describe("Header Section Test", () => {
let wrapper;
beforeEach(() => {
        wrapper = shallow(<Provider store={store1}><Header /></Provider>); // pass a store instance, not the mockStore factory
});
afterEach(() => {
fetchMock.restore();
})
const user = {
"id": "106899629700260704678",
"email": "<EMAIL>",
"first_name": "Prajakta",
"last_name": "Charde",
"picture": "https://lh3.googleusercontent.com/a-/AOh14GjtYgJumsRsIoorjBCTY43WWA10CzkLWOq7hwidzA=s96-c",
"balance_points": 5000
};
const expectedActions = [
actions.login(user)
]
const state = {
login:{
loginStatus:true,
userDetails: user
}
}
const store1 = mockStore(state);
it("should load Header Component", () => {
wrapper = shallow(<Provider store={store1}><Header /></Provider>);
expect(wrapper).not.toBe(null);
});
it("if main header element present ", () => {
wrapper = shallow(<Provider store={store1}>
<Header />
</Provider>);
expect(wrapper.find(AppBar)).toBeDefined();
expect(wrapper.find(Toolbar)).toBeDefined();
expect(wrapper.find(Typography)).toBeDefined();
});
it('should present home button', () =>{
wrapper = mount(<Provider store={store1}>
<Header />
</Provider>);
expect(wrapper.find(Button).at(0).text()).toBe('YOYOGift')
});
it('should present gift received button', () =>{
wrapper = mount(<Provider store={store1}>
<Header />
</Provider>);
expect(wrapper.find(Button).at(1).text()).toBe('GIFTS RECEIVED');
});
it('should present gift send button', () =>{
wrapper = mount(<Provider store={store1}>
<Header />
</Provider>);
expect(wrapper.find(Button).at(2).text()).toBe('GIFTS SENT');
});
    it('should present my profile button', () =>{
wrapper = mount(<Provider store={store1}>
<Header />
</Provider>);
expect(wrapper.find(Button).at(3).text()).toBe('MY PROFILE');
});
});<file_sep>import React from 'react';
import { shallow, mount } from 'enzyme';
import GiftsReceived from './giftsReceived';
describe('GiftsReceived component', () => {
it('should render component', () =>{
let props = {
classes:{
root:{
margin:'0px'
}
},
fetchSentCards: jest.fn(),
data:[
{
cardName:'',
cardPoints:'',
senderEmail:'',
cardIssueDate:'',
cardExpiryDate: '',
cardShortDesc: '',
isRedeemed: true
}
],
totalCount:1
}
let wrapper = shallow(<GiftsReceived {...props}/>);
expect(wrapper).not.toBe(null);
});
});<file_sep>import React from 'react';
import Footer from './Footer';
import { mount, shallow } from 'enzyme';
import PropTypes from 'prop-types';
import AppBar from "@material-ui/core/AppBar";
import Toolbar from "@material-ui/core/Toolbar";
describe('Footer component', () => {
it('should render snapshot', () => {
Footer.propTypes = {
classes: PropTypes.object.isRequired
};
let cla = {root:{margin:'10px'}}
let wrapper = shallow(<Footer classes={ cla }/>);
expect(wrapper.find(AppBar)).toBeTruthy();
expect(wrapper.find(Toolbar)).toBeTruthy();
})
});<file_sep>import React from 'react';
import DatePickers from './datePicker';
import PropTypes from 'prop-types';
import { mount, shallow } from 'enzyme';
import TextField from '@material-ui/core/TextField';
describe('DatePickers component', () => {
it('should render snapshot', () => {
DatePickers.propTypes = {
classes: PropTypes.object.isRequired
};
let classesss = {container:{}, label:{}, defaultValue:{}, textField:{}}
let wrapper = shallow(<DatePickers classes={classesss} />)
expect(wrapper.find(TextField)).toBeTruthy();
})
}); | 97dcc0b93431671879f1a04745bd9ab483651939 | [
"JavaScript",
"Markdown"
] | 23 | JavaScript | prajcharde/React-training-Project-2021 | 77ebc5ad4557c2399adf54e1bce83248207b3e6b | 17178f730db23328184fc1ec9f747fb11424160f | |
refs/heads/master | <repo_name>Aklil/PlayingField<file_sep>/PlayingField-ejb/src/main/java/com/distributedapplication/OrderFacadeLocal.java
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.distributedapplication;
import javax.ejb.Local;
/**
*
* @author Aklil
*/
@Local
public interface OrderFacadeLocal {
void check(Order Order) throws Exception;
Order process(final Order Order);
Order shipping(final Order Order);
}
<file_sep>/PlayingField-web/src/main/webapp/static/js/main.js
var storeApp = angular.module('storeApp',[
'ngRoute',
'ngResource']);
var SERVER_URL = "http://localhost:8080/PlayingField-web";
//http://localhost:8383/LaptopStoreClient/index.html#/login
storeApp.config(['$routeProvider',
function($routeProvider) {
$routeProvider
.when('/index', {
templateUrl: 'partials/login.html',
controller: 'LoginController'
})
.otherwise({redirectTo: '/index'});
}]);
storeApp.factory('RestService',function($http){
return{
getMessage: function(){
return $http.get(SERVER_URL+"/message.htm");
}
};
});
storeApp.controller('LoginController',function($scope,$route,RestService){
$scope.testMessage="";
RestService.getMessage().success(function(data,status){
if(status == 200){
console.log("Message:"+ data);
$scope.testMessage = data;
}
}).error(function(data,status){
console.log("Error Fetching data"+status);
});
});
<file_sep>/PlayingField-ejb/src/main/java/com/distributedapplication/TestSingletonFacade.java
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.distributedapplication;
import javax.ejb.Singleton;
/**
*
* @author Aklil
*/
@Singleton
public class TestSingletonFacade implements TestSingletonFacadeLocal {
String name;
@Override
public String getCustomerName() {
return this.name;
}
// Add business logic below. (Right-click in editor and choose
// "Insert Code > Add Business Method")
@Override
public void setCustomerName(String name) {
this.name = name;
}
}
| 7959d650ea36a1bb48ded9495d7d7b193b8e433d | [
"JavaScript",
"Java"
] | 3 | Java | Aklil/PlayingField | 38b459eaff7298747d0ded9a71fa447606afb772 | 337dfede3d074fe1c45d7d4d51914b998accec7f | |
refs/heads/master | <file_sep>06026111 PRACTICAL NoSQL DATABASE (1/2019) Project
Uses PHP to control a MongoDB database.
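
A minimal query sketch with the MongoDB PHP driver, mirroring the pattern used in the project page; the connection URI and the collection name `PND_Project.shoes` are taken from the project, while the filter value is only an illustration:

```php
<?php
$conn = new MongoDB\Driver\Manager("mongodb://localhost:27017");
$query = new MongoDB\Driver\Query(['brand' => 'Nike']); // illustrative filter
foreach ($conn->executeQuery("PND_Project.shoes", $query) as $row) {
    echo $row->brand . " " . $row->model . "\n";
}
```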
<file_sep><html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta http-equiv="X-UA-Compatible" content="ie=edge">
<link href="https://stackpath.bootstrapcdn.com/bootstrap/4.4.1/css/bootstrap.min.css" rel="stylesheet" integrity="<KEY>" crossorigin="anonymous">
<title>Welcome to our Database Website</title>
</head>
<body>
<center><h1>Shoes</h1></center><br>
<form action="" method="POST">
        <label for='brand'>Select your Brand :</label>
        <select id='brand' name='brand'>
            <option value='0'>-----</option>
            <option value='1'>Hello World!</option>
        </select>
        <button type='submit' name='submit'>Search!</button>
</form>
<?php
$conn = new MongoDB\Driver\Manager("mongodb://localhost:27017");
$query = new MongoDB\Driver\Query([]);
$rows = $conn->executeQuery("PND_Project.shoes", $query);
if (isset($_POST['submit'])){
foreach ($rows as $row){
echo "<div class='card'><div class='card-body'>";
echo "Brand : ". $row->brand . "<br>";
echo "Model : ". $row->model . "<br>";
echo "</div></div>";
}
}
?>
</body>
</html> | 1859a16e2f96853880aced777cd8a8c57783a5a1 | [
"Markdown",
"PHP"
] | 2 | Markdown | SophonWongyai/PND_Project | bfdeb5d677d835154f57241b8e48390ba7fdd608 | 9bcf9bd9369a870e48149db1d325c3d3ea7a4b8d | |
refs/heads/master | <repo_name>gbzan/sim-xrf<file_sep>/python/snr/__init__.py
__author__ = "Yue"
__all__ = ["snr"]<file_sep>/Include/global.hpp
/* global.hpp */
#ifndef GLOBAL_HPP
#define GLOBAL_HPP

#include <vector>

extern std::vector<double> ev_xrf, y_xrf;
#endif<file_sep>/python/__init__.py
__author__ = 'Yue'
__all__ = ["pyapi"]
path = __path__[0]
libpath = path + '/../Lib'
# Load .pyconfig file
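# Each non-empty line is expected to look like "key = v1, v2, ..." with an optional
# trailing '#' comment; integer lists are tried first, and a ValueError falls back
# to floats. Illustrative line (values made up): nout = 4096, 25, 400, 700, 10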
configf = open(path + '/../.pyconfig')
config = {}
for line in configf.readlines():
key, value = line.split('=')
try:
config[key.strip()] = [int(x) for x in value.split('#')[0].split(',')]
except ValueError:
config[key.strip()] = [float(x) for x in value.split('#')[0].split(',')]
configf.close()
plt_kwargs = {key: value for key, value in config.items()}
del (plt_kwargs['nout'])<file_sep>/src/dose.cpp
//dose.cpp
#include <cmath>
#include "dose.hpp"
#include "constants.hpp"
Dose::Dose(const Sample & sp, const Illumination & il) : n_photons(_n_photons), beam_cross_section(_beam_cross_section), dose_vec(_dose_vec), total_dose(_total_dose)
{
_n_photons = il.n_photons;
_beam_cross_section = il.beam_cross_section;
_total_dose = 0;
double _n_photons_calc, _beam_cross_section_calc;
if ( n_photons > 0 && beam_cross_section > 0)
{
_n_photons_calc = n_photons;
_beam_cross_section_calc = beam_cross_section;
}
else
{
_n_photons_calc = 1;
_beam_cross_section_calc = 1;
}
double joules_absorbed, matrix_mass, n_photons_in=_n_photons_calc, n_photons_out;
for (auto ml : sp.layer_vec)
{
if (ml.thickness > 0)
{
n_photons_out = n_photons_in*std::exp(-ml.mac_tot(il.ev0)*ml.thickness);
joules_absorbed = (n_photons_in - n_photons_out)*il.ev0*eV_in_Joules;
matrix_mass = ml.thickness*_beam_cross_section_calc*ml.density*1000/std::sin(il.psi);
_dose_vec.push_back(joules_absorbed/matrix_mass);
_total_dose += dose_vec.back();
n_photons_in = n_photons_out;
}
else
_dose_vec.push_back(-1);
}
}
Dose::~Dose()
{
}
Dose & Dose::operator=(const Dose & d)
{
if (this != &d)
{
_n_photons = d.n_photons;
_beam_cross_section = d.beam_cross_section;
_dose_vec = d.dose_vec;
_total_dose = d.total_dose;
}
return *this;
}
void Dose::show() const
{
out(std::cout);
}
void Dose::out(std::ostream & ost) const
{
ost << "# ======================================== #" << std::endl;
ost << "# # # # # Radiation dosage: # # # # #" << std::endl;
if ( n_photons > 0 && beam_cross_section > 0)
{
ost << "# For " << n_photons << " photon(s) incident, "
<< "beam cross-sectional area = " << beam_cross_section << " cm^2 "
<< "(d = " << std::sqrt(beam_cross_section/Pi)*2e7 << " nm):" << std::endl;
}
else
ost << "# Dose per incident photon number density (1 photon/cm^2)" << std::endl;
for (int i=0; i<dose_vec.size(); i++)
ost << "# \tLayer #" << i+1 << ":\t" << dose_vec[i] << " Gy" << std::endl;
ost << "# \tTotal:\t" << total_dose << " Gy" << std::endl;
ost << "# # # # # End of radiation dosage # # # # #" << std::endl;
ost << "# ======================================== #" << std::endl;
}
<file_sep>/Include/geometry.hpp
//geometry.hpp
#ifndef GEOMETRY_HPP
#define GEOMETRY_HPP
#include <vector>
#include <ostream>
#include "constants.hpp"
#include "sample.hpp"
void spherical2cartesian(const double & r, const double & theta, const double & beta, double & x, double & y, double & z); //Convert spherical coordinates (r,theta_rad,beta_rad) to Cartesian coordinates (x,y,z).
double arctan2(const double & x);
void rotationx(const double & rad, double & x, double & y, double & z);
void rotationy(const double & rad, double & x, double & y, double & z);
class Illumination
{
private:
double _ev0;
double _psi;
double _alpha;
double _n_photons;
double _beam_cross_section;
public:
Illumination(double ev0_ = 1e4, double psi_ = Pi/4, double alpha_ = 0, double n_photons_=-1, double beam_cross_section_=-1);
~Illumination();
Illumination & operator=(const Illumination & il);
const double & ev0;
const double & psi;
const double & alpha;
const double & n_photons;
const double & beam_cross_section;
double psi_prime(const double & theta, const double & beta) const;
void show() const;
void out(std::ostream & ost) const;
};
class solid_angle
{
private:
double _angle_range[4];
std::vector<double> _theta;
std::vector<double> _beta;
double _theta_inc;
double _beta_inc;
double _subtend;
void update();
public:
solid_angle();
solid_angle(const double *ar, double th_inc = Pi/180, double be_inc = Pi/180);
~solid_angle();
solid_angle & operator=(const solid_angle & sa);
// const std::vector<double> & get_theta(double th_inc = -1);
// const std::vector<double> & get_beta(double be_inc = -1);
const double* const angle_range = _angle_range;
const std::vector<double> & theta;
const std::vector<double> & beta;
const double & theta_inc;
const double & beta_inc;
const double & subtend;
double domega(const double & theta) const;
void show() const;
void out(std::ostream & ost) const;
};
double atten_mono(const double & ev0,
const double & ev,
const double & psi,
const double & psiprime,
const Monolayer & c);
double atten_refl(const double & ev0,
const double & ev,
const double & psi,
const double & psiprime,
const Monolayer & ml);
double atten_trans_in(const double & ev0,
const double & psi,
const Monolayer & ml);
double atten_trans_out(const double & ev,
const double & psiprime,
const Monolayer & ml);
#endif<file_sep>/src/cs.cpp
#include <cmath>
#include "xraylib.h"
#include "constants.hpp"
#include "math.hpp"
inline double ev2nm(double ev) //Convert photon energy (in ev) to wavelength (in nm).
{
return 1239.84187/ev;
}
double theta2x(double theta_rad, double ev) //theta in rad -> x in cm^-1
{
return sin(theta_rad/2.)/(1.e-7*ev2nm(ev));
}
/*differential cross section for single electron*/
double thomson_pol(double theta_rad, double beta_rad)
{
return sq(r_e)*(sq(cos(beta_rad)*cos(theta_rad))+sq(sin(beta_rad)));
}
double thomson_unpol(double theta_rad)
{
return sq(r_e)*(1+sq(cos(theta_rad)))/2.;
}
double dcs_rayleigh_pol(double theta_rad, double beta_rad, double ev, int Z)
{
return sq(FF_Rayl(Z, 1e-8*theta2x(theta_rad, ev)))*thomson_pol(theta_rad, beta_rad);
}
double dcs_rayleigh_unpol(double theta_rad, double ev, int Z)
{
return sq(FF_Rayl(Z, 1e-8*theta2x(theta_rad, ev)))*thomson_unpol(theta_rad);
}
/* Compton scattering cross sections */
/* compton dcs of single electron */
//dcs
double ev_scattered(double theta_rad, double ev) //return the energy in ev of the Compton scattered photons.
{
return ev/(1+ev/m_e*(1-cos(theta_rad)));
}
double klein_nishina_unpol(double theta_rad, double ev) //Differential Klein-Nishina cross section in cm^2 for unpolarized radiation.
{
double ev1 = ev_scattered(theta_rad, ev);
return sq(r_e*(ev1/ev))/2.*(ev1/ev+ev/ev1-sq(sin(theta_rad)));
}
double klein_nishina_pol(double theta_rad, double beta_rad, double ev) //Differential Klein-Nishina cross section in cm^2 for polarized radiation.
{
double ev1 = ev_scattered(theta_rad, ev);
return sq(r_e*(ev1/ev))/2.*(ev1/ev+ev/ev1-2*sq(sin(theta_rad)*cos(beta_rad)));
}
//total cs
double klein_nishina_total_col(double ev) // Total Klein-Nishina collision cross section in cm^2.
{
double a = ev/m_e;
return 2*Pi*sq(r_e)*((1+a)/(a*a*a)*(2*a*(1+a)/(1+2*a)-log(1+2*a))+log(1+2*a)/(2*a)-(1+3*a)/sq(1+2*a));
}
double klein_nishina_total_sca(double ev) //Total Klein-Nishina scattering cross section in cm^2.
{
double a = ev/m_e;
return Pi*sq(r_e)*(log(1+2*a)/(a*a*a)+2*(1+a)*(2*sq(a)-2*a-1)/sq(a*(1+2*a))+8*sq(a)/(3*sq(1+2*a)*(1+2*a)));
}
//compton dcs of atom
double dcs_compton_pol(double theta_rad, double beta_rad, double ev, int Z) //Differential Compton cross section in cm^2 for polarized radiation.
{
return klein_nishina_pol(theta_rad, beta_rad, ev)*SF_Compt(Z, theta2x(theta_rad,ev)*1e-8);
}
double dcs_compton_unpol(double theta_rad, double ev, int Z) //Differential Compton cross section in cm^2 for unpolarized radiation.
{
return klein_nishina_unpol(theta_rad, ev)*SF_Compt(Z, theta2x(theta_rad,ev)*1e-8);
}<file_sep>/Include/xrf.hpp
/* xrf.hpp */
#ifndef XRF_HPP
#define XRF_HPP
#include <vector>
using namespace std;
#define K_LINES -29
#define L_LINES -113
#define M_LINES -219
#define N_LINES -324
#define O_LINES -374
#define P_LINES -383
vector<int> mac_xrf(double ev0, int Z, vector<double> & ev_vec, vector<double> & y_vec, double weight = 1.0, int line_end = P_LINES);
#endif
<file_sep>/Include/dose.hpp
//dose.hpp
#ifndef DOSE_HPP
#define DOSE_HPP
#include <vector>
#include <iostream>
#include "geometry.hpp"
#include "sample.hpp"
class Dose
{
private:
double _n_photons;
double _beam_cross_section;
double _total_dose;
std::vector<double> _dose_vec;
public:
Dose(const Sample & sp, const Illumination & il);
~Dose();
Dose & operator=(const Dose & d);
const double & n_photons;
const double & beam_cross_section;
const double & total_dose;
const std::vector<double> & dose_vec;
void show() const;
void out(std::ostream & ost) const;
};
#endif<file_sep>/simpy.py
# simpy.py
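# Usage (inferred from the sys.argv handling below; the paths shown are just the
# defaults used when the arguments are omitted):
#   python simpy.py [input_file [output_file]]   # defaults: ./input.txt, ./output.txt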
import python.pyapi as pyapi
from python import plt_kwargs
if __name__ == "__main__":
import sys
import time
# Input and output file names
try:
input_file = sys.argv[1]
try:
output_file = sys.argv[2]
except IndexError:
output_file = "./output.txt"
except IndexError:
input_file = "./input.txt"
output_file = "./output.txt"
start = time.time()
spec = pyapi.calc(input_file=input_file, output_file=output_file)
print 'Calculation finished.'
print "Time used:", time.time() - start, 'sec.'
spec.show(**plt_kwargs)
# import matplotlib.pyplot as plt
# plt.plot(spec.y_sep.T)
# plt.plot(spec.y_vec)
# plt.yscale('log')
# plt.ylim(1e-14,1e-4)
# plt.legend(spec.labels+['total'])
# plt.show()<file_sep>/src/input.cpp
/*input.cpp*/
#include <iostream>
#include <fstream>
#include <sstream>
#include <string>
#include "input.hpp"
#include "constants.hpp"
#include "math.hpp"
inline std::string trim_comment(const std::string & s, const std::string & delimiter="#")
{
if (s.empty())
return s;
else
return s.substr(0, s.find(delimiter));
}
inline std::string trim_right(
const std::string & s,
const std::string & delimiters = " \f\n\r\t\v" )
{
if (s.empty())
return s;
return s.substr(0, s.find_last_not_of(delimiters) + 1);
}
inline std::string trim_left(
const std::string & s,
const std::string & delimiters = " \f\n\r\t\v" )
{
if (s.empty())
return s;
return s.substr(s.find_first_not_of(delimiters));
}
inline std::string trim(
const std::string & s,
const std::string & delimiters = " \f\n\r\t\v" )
{
return trim_left(trim_right(trim_comment(s), delimiters), delimiters);
}
inline std::vector<int> parse_vec_int(const std::string & s)
{
std::vector<int> v;
std::istringstream is(s);
std::string t;
while(std::getline(is, t, ','))
// {
// std::cout << trim(t) << std::endl;
v.push_back(std::stoi(trim(t)));
// }
return v;
}
inline std::vector<double> parse_vec_double(const std::string & s)
{
std::vector<double> v;
std::istringstream is(s);
std::string t;
while(std::getline(is, t, ','))
v.push_back(std::stod(trim(t)));
return v;
}
inline bool parse_parameter(const std::string & line, std::string & name, std::string & value, const std::string & sep = "=")
{
std::size_t pos = line.find(sep);
if (pos == std::string::npos)
return false;
else
{
name = trim(line.substr(0, pos));
value = trim(line.substr(pos+1));
return true;
}
}
inline double deg_to_rad(const double & deg)
{
return deg*Pi/180;
}
void readfile(std::string filename, Sample & sp, Illumination & il, solid_angle & omega, Detector & det)
{
std::ifstream fin;
std::string s;
fin.open(filename.c_str());
std::size_t pos;
while (getline(fin, s))
{
if (trim(s).empty())
continue;
else if (s.find("Sample:") != std::string::npos)
{
std::cout << "Reading in sample data..." << std::endl;
// read sample
while (getline(fin, s) && !trim(s).empty())
{
int layer = std::stoi(trim(s.substr(s.find("layer")+5, s.find(":")-6)));
std::cout << "Reading layer #" << layer << "..." << std::endl;
//read layer
std::vector<int> Z_vec;
std::vector<double> p_vec;
double thickness, density;
while (getline(fin, s) && !trim(s).empty())
{
std::string name, value;
if (parse_parameter(s, name, value))
{
if (name == "Z")
{
Z_vec = parse_vec_int(value);
}
else if (name == "p")
{
p_vec = parse_vec_double(value);
}
else if (name == "thickness")
{
thickness = std::stod(value);
}
else if (name == "density")
{
density = std::stod(value);
}
}
}
sp.add_layer(Monolayer(Z_vec, p_vec, density, thickness, layer));
}
}
else if (s.find("Detector:") != std::string::npos)
{
std::cout << "Reading in detector data..." << std::endl;
// read detector
double ev_offset, ev_gain, n_channels,
noise, fano, gamma, fs, ft,
thickness, density;
std::string material;
while (getline(fin, s) && !trim(s).empty())
{
std::string name, value;
if(parse_parameter(s, name, value))
{
if (name == "ev_offset")
{
ev_offset = std::stod(value);
}
else if (name == "ev_gain")
{
ev_gain = std::stod(value);
}
else if (name == "n_channels")
{
n_channels = std::stod(value);
}
else if (name == "noise")
{
noise = std::stod(value);
}
else if (name == "fano")
{
fano = std::stod(value);
}
else if (name == "gamma")
{
gamma = std::stod(value);
}
else if (name == "fs")
{
fs = std::stod(value);
}
else if (name == "ft")
{
ft = std::stod(value);
}
else if (name == "material")
{
material = value;
}
else if (name == "thickness")
{
thickness = std::stod(value);
}
else if (name == "density")
{
density = std::stod(value);
}
				}
			}
			// construct the detector once, after the whole section has been read
			det = Detector(Channel(ev_offset, ev_gain, n_channels),
					Response(noise, fano, gamma, fs, ft, ev_gain),
					Window(material, thickness, density));
		}
else if (s.find("Illumination:") != std::string::npos)
{
std::cout << "Reading in illumination data..." << std::endl;
// read illumination
double ev0, psi, alpha, n_photons=-1, beam_cross_section=-1;
while (getline(fin, s) && !trim(s).empty())
{
std::string name, value;
if(parse_parameter(s, name, value))
{
if (name == "ev0")
{
ev0 = std::stod(value);
}
else if (name == "psi")
{
psi = deg_to_rad(std::stod(value));
}
else if (name == "alpha")
{
alpha = deg_to_rad(std::stod(value));
}
else if (name == "n_photons")
{
n_photons = std::stod(value);
}
else if (name == "beam_cross_section")
{
beam_cross_section = std::stod(value);
}
else if (name == "beam_diameter")
{
beam_cross_section = sq(std::stod(value)/2.)*Pi;
}
}
}
il = Illumination(ev0, psi, alpha, n_photons, beam_cross_section);
}
else if (s.find("Solid angle:") != std::string::npos)
{
std::cout << "Reading in solid angle data..." << std::endl;
// read solid angle
std::vector<double> ar;
double theta_inc, beta_inc;
while (getline(fin, s) && !trim(s).empty())
{
std::string name, value;
if(parse_parameter(s, name, value))
{
if (name == "angle_range")
{
ar = parse_vec_double(value);
for (std::vector<double>::iterator i = ar.begin(); i < ar.end(); i++)
*i = deg_to_rad(*i);
}
else if (name == "theta_inc")
{
theta_inc = deg_to_rad(std::stod(value));
}
else if (name == "beta_inc")
{
beta_inc = deg_to_rad(std::stod(value));
}
}
}
omega = solid_angle(ar.data(), theta_inc, beta_inc);
}
}
std::cout << "Done!" << std::endl;
}
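/*
 * Sketch of an input file accepted by readfile() above, inferred from the parsing
 * logic in this translation unit. The section headers and key names are the ones
 * the parser searches for; the numeric values are purely illustrative. Sections
 * end at a blank line, '#' starts a comment, and angles are given in degrees
 * (converted to radians on read).
 *
 *     Illumination:
 *     ev0 = 10000
 *     psi = 45
 *     alpha = 0
 *
 *     Solid angle:
 *     angle_range = 80, 100, -10, 10
 *     theta_inc = 1
 *     beta_inc = 1
 *
 *     Detector:
 *     ev_offset = 0
 *     ev_gain = 10
 *     n_channels = 2048
 *     noise = 100
 *     fano = 0.114
 *     gamma = 2.5
 *     fs = 0.03
 *     ft = 0.02
 *     material = Be
 *     thickness = 8e-4
 *     density = 1.85
 *
 * The "Sample:" section is parsed per layer: each block starts with a "layer N:"
 * line (the substring arithmetic above is sensitive to its exact spacing) and then
 * lists Z, p, thickness and density as "key = value" pairs, again ended by a
 * blank line.
 */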
<file_sep>/python/snr/pysnip.py
import numpy as np
from scipy.optimize import curve_fit
def FWHM(x, noise=100, fano=0.114):
sigma = np.sqrt((noise / 2.3548) ** 2 + 3.58 * fano * x)
return 2.3548 * sigma
def fit_FWHM(x, F):
def _FWHM(x, noise, fano):
return (noise / 2.3548) ** 2 + 3.58 * fano * x
popt, pcov = curve_fit(_FWHM, x, (F / 2.3548) ** 2, p0=[100, 0.114])
return popt
def energy_to_channel(energy, offset=2.97, gain=12.26952):
return 1. * (energy - offset) / gain
# # # Low statistics digital filter
def lsdf(E, y, FWHM=FWHM,
f=1.5,
A=75,
M=10,
r=1.3):
def _reduce(x, length_start):
for i in range(length_start):
length = length_start - i
if x < length:
raise IndexError
L = y[x - length:x].sum()
R = y[x + 1:x + length + 1].sum()
S = y[x] + L + R
slope = (R + 1.) / (L + 1.)
if S < M or S < A * np.sqrt(y[x]) or (1. / r <= slope <= r):
return S / (2. * length + 1)
print 'Not found for x = %d!' % x
return y[x]
y_out = y.copy()
for x in range(len(E)):
try:
len_0 = int(energy_to_channel(f * FWHM(E[x]), E[0], E[1] - E[0]))
y_out[x] = _reduce(x, len_0)
except IndexError:
pass
return y_out
# # # Peak-clipping
def snip(E, y, FWHM=FWHM, offset=0., gain=10., **kwargs):
det = kwargs.get('detector')
loops = kwargs.get('loops', 24)
end_loops = kwargs.get('end_loops', 8)
reduce_factor = kwargs.get('reduce_factor', np.sqrt(2))
factor = kwargs.get('factor', 2)
if det is not None:
FWHM = det.response.FWHM
offset = det.channel.offset
gain = det.channel.gain
def G(y):
return np.log(np.log(y + 1) + 1)
def w(x, factor=2):
return energy_to_channel(factor * FWHM(E[x]), offset=offset, gain=gain)
def G_inv(z):
return np.exp(np.exp(z) - 1) - 1
z_out = G(y)
for i in range(loops):
if i >= loops - end_loops:
factor /= 1. * reduce_factor
z = z_out.copy()
for x in range(len(E)):
try:
                _w = int(w(x, factor=factor))  # whole number of channels for the clipping window
if _w > x:
raise IndexError
z_bar = (z[x + _w] + z[x - _w]) / 2.
z_out[x] = min(z[x], z_bar)
except IndexError:
pass
return G_inv(z_out)
<file_sep>/src/detector.cpp
/*detector.cpp*/
#include <iostream>
#include<cmath>
#include "xraylib.h"
#include "detector.hpp"
#include "math.hpp"
#include "constants.hpp"
#define MP 1
Channel::Channel(double ev_offset_, double ev_gain_, int n_channels_)
: ev_offset(_ev_offset), ev_gain(_ev_gain), n_channels(_n_channels)
{
_ev_offset = ev_offset_;
_ev_gain = ev_gain_;
_n_channels = n_channels_;
}
Channel & Channel::operator=(const Channel & ch)
{
if (this != & ch)
{
_ev_offset = ch.ev_offset;
_ev_gain = ch.ev_gain;
_n_channels = ch.n_channels;
}
return *this;
}
int Channel::ev_to_channel(double ev) const
{
return int((ev - ev_offset)/ev_gain + 0.5);
}
void Channel::bin(const std::vector<double> & ev_raw, const std::vector<double> & y_raw, std::vector<double> & y_binned) const
{
if (y_binned.size() != n_channels)
{
std::cout << "Error: Bin size does not match the number of channels!" << std::endl;
return;
}
#pragma omp parallel for if(MP)
for (int i=0; i<ev_raw.size(); i++)
y_binned[ev_to_channel(ev_raw[i])] += y_raw[i];
}
void Channel::bin(const double & ev_raw, const double & y_raw, std::vector<double> & y_binned) const
{
if (y_binned.size() != n_channels)
{
std::cout << "Error: Bin size does not match the number of channels!" << std::endl;
return;
}
y_binned[ev_to_channel(ev_raw)] += y_raw;
}
void Channel::bin(const std::vector<double> & ev_raw, const std::vector<double> & y_raw, std::vector<double> & y_binned, std::vector<double> & y_separate, const std::vector<int> & row) const
{
if (y_binned.size() != n_channels)
{
std::cout << "Error: Bin size does not match the number of channels!" << std::endl;
return;
}
int n_old = y_separate.size();
y_separate.resize(n_old+n_channels);
#pragma omp parallel for collapse(2) if(MP)
for (int i = 0; i < row.size()-1; i++)
for (int j = row[i]; j < ((row[i+1] > 0) ? row[i+1] : ev_raw.size()); j++)
{
int chn = ev_to_channel(ev_raw[j]);
y_binned[chn] += y_raw[j];
// for separate output
y_separate[n_old + i*n_channels + chn] = y_raw[j];
}
}
void Channel::bin(const double & ev_raw, const double & y_raw, std::vector<double> & y_binned, std::vector<double> & y_separate) const
{
if (y_binned.size() != n_channels)
{
std::cout << "Error: Bin size does not match the number of channels!" << std::endl;
return;
}
int n_old = y_separate.size();
y_separate.resize(n_old+n_channels);
int chn = ev_to_channel(ev_raw);
y_binned[chn] += y_raw;
y_separate[n_old+chn] = y_raw;
}
void Channel::show() const
{
out(std::cout);
}
void Channel::out(std::ostream & ost) const
{
ost << "# Detector channel parameters:" << std::endl;
ost << "# \tev_offset = " << ev_offset << std::endl;
ost << "# \tev_gain = " << ev_gain << std::endl;
ost << "# \tn_channels = " << n_channels << std::endl;
ost << std::endl;
}
Response::Response(double noise_,
double fano_,
double gamma_,
double fs_,
double ft_,
double ev_gain_)
: noise(_noise), fano(_fano), gamma(_gamma), fs(_fs), ft(_ft), ev_gain(_ev_gain)
{
_noise = noise_;
_fano = fano_;
_gamma = gamma_;
_fs = fs_;
_ft = ft_;
_ev_gain = ev_gain_;
}
Response & Response::operator=(const Response & r)
{
if (this != & r)
{
_fano = r.fano;
_gamma = r.gamma;
_fs = r.fs;
_ft = r.ft;
_ev_gain = r.ev_gain;
}
return *this;
}
void Response::set_gain(const double ev_gain)
{
_ev_gain = ev_gain;
}
double Response::FWHM(double ev)
{
return 2.3548*std::sqrt(sq(noise/2.3548)+3.58*fano*ev);
}
void Response::show() const
{
out(std::cout);
}
void Response::out(std::ostream & ost) const
{
ost << "# Detector response parameters:" << std::endl;
ost << "# \tfano = " << fano << std::endl;
ost << "# \tgamma = " << gamma << std::endl;
ost << "# \tfs = " << fs << std::endl;
ost << "# \tft = " << ft << std::endl;
ost << std::endl;
}
Window::Window(std::string material_ ,
double thickness_,
double density_)
: material(_material), thickness(_thickness), density(_density)
{
_material = material_;
_thickness = thickness_;
_density = density_;
}
Window & Window::operator=(const Window & w)
{
if (this != & w)
{
_material = w.material;
_thickness = w.thickness;
_density = w.density;
}
return *this;
}
double Window::transmission(double ev) const
{
double _mac = CS_Total_CP(material.c_str(), ev/1000.);
return std::exp(-_mac*density*thickness);
}
void Window::show() const
{
out(std::cout);
}
void Window::out(std::ostream & ost) const
{
ost << "# Detector window parameters:" << std::endl;
ost << "# \tmaterial = " << material << std::endl;
ost << "# \tthickness = " << thickness << std::endl;
ost << "# \tdensity = " << density << std::endl;
ost << std::endl;
}
Detector::Detector(Channel channel_,
Response response_,
Window window_)
: channel(_channel), response(_response), window(_window)
{
_channel = channel_;
_response = response_;
_window = window_;
_response.set_gain(channel.ev_gain);
}
Detector & Detector::operator=(const Detector & d)
{
if (this != & d)
{
_channel = d.channel;
_response = d.response;
_window = d.window;
}
return *this;
}
void Detector::genspec(const std::vector<double> & ev_raw, const std::vector<double> & y_raw, std::vector<double> & y_binned, bool det_response, bool det_window) const
{
if (y_binned.size() != channel.n_channels)
{
std::cout << "Error: Bin size does not match the number of channels!" << std::endl;
return;
}
//apply detector response
if (det_response)
{
std::cout << "Generating spectrum with detector response..." << std::endl;
double sigma, y_temp;
for (std::vector<double>::const_iterator ev = ev_raw.begin(), y = y_raw.begin();
ev < ev_raw.end(); ev++, y++)
{
sigma = std::sqrt(sq(response.noise/2.3548)+3.58*response.fano*(*ev));
y_temp = *y;
//apply detector filter window
if (det_window)
y_temp *= window.transmission(*ev);
#pragma omp parallel for if(MP)
for (int i = 0; i < y_binned.size(); i++)
{
double ev_ch = channel.ev_offset + i*channel.ev_gain;
//Gaussian
y_binned[i] += y_temp*channel.ev_gain/(sigma*std::sqrt(2*Pi))*std::exp(-sq((*ev)-ev_ch)/(2*sq(sigma)));
//Step function
y_binned[i] += response.fs*y_temp*channel.ev_gain/(2.*(*ev))*std::erfc((ev_ch-(*ev))/(std::sqrt(2)*sigma));
//Tailing function
y_binned[i] += response.ft*y_temp*channel.ev_gain/(2.*response.gamma*sigma*std::exp(-1./(2*sq(response.gamma))))*std::exp((ev_ch-(*ev))/(response.gamma*sigma))*std::erfc((ev_ch-(*ev))/(std::sqrt(2)*sigma)+1/(std::sqrt(2)*response.gamma));
}
}
}
else
{
std::cout << "Generating spectrum without detector response..." << std::endl;
if (det_window)//apply detector filter window
#pragma omp parallel for if(MP)
for (int i = 0; i < ev_raw.size(); i++)
y_binned[channel.ev_to_channel(ev_raw[i])] += (y_raw[i])*window.transmission(ev_raw[i]);
else
channel.bin(ev_raw, y_raw, y_binned);
}
}
void Detector::genspec(const double & ev_raw, const double & y_raw, std::vector<double> & y_binned, bool det_response, bool det_window) const
{
if (y_binned.size() != channel.n_channels)
{
std::cout << "Error: Bin size does not match the number of channels!" << std::endl;
return;
}
//apply detector response
if (det_response)
{
std::cout << "Generating spectrum with detector response..." << std::endl;
double sigma, y_temp;
sigma = std::sqrt(sq(response.noise/2.3548)+3.58*response.fano*ev_raw);
y_temp = y_raw;
//apply detector filter window
if (det_window)
y_temp *= window.transmission(ev_raw);
for (int i = 0; i < y_binned.size(); i++)
{
double ev_ch = channel.ev_offset + i*channel.ev_gain;
//Gaussian
y_binned[i] += y_temp*channel.ev_gain/(sigma*std::sqrt(2*Pi))*std::exp(-sq(ev_raw-ev_ch)/(2*sq(sigma)));
//Step function
y_binned[i] += response.fs*y_temp*channel.ev_gain/(2.*ev_raw)*std::erfc((ev_ch-ev_raw)/(std::sqrt(2)*sigma));
//Tailing function
y_binned[i] += response.ft*y_temp*channel.ev_gain/(2.*response.gamma*sigma*std::exp(-1./(2*sq(response.gamma))))*std::exp((ev_ch-ev_raw)/(response.gamma*sigma))*std::erfc((ev_ch-ev_raw)/(std::sqrt(2)*sigma)+1/(std::sqrt(2)*response.gamma));
}
}
else
{
std::cout << "Generating spectrum without detector response..." << std::endl;
if (det_window)//apply detector filter window
y_binned[channel.ev_to_channel(ev_raw)] += y_raw*window.transmission(ev_raw);
else
channel.bin(ev_raw, y_raw, y_binned);
}
}
void Detector::genspec(const std::vector<double> & ev_raw, const std::vector<double> & y_raw, std::vector<double> & y_binned, std::vector<double> & y_separate, const std::vector<int> & row, bool det_response, bool det_window) const
{
if (y_binned.size() != channel.n_channels)
{
std::cout << "Error: Bin size does not match the number of channels!" << std::endl;
return;
}
//apply detector response
if (det_response)
{
std::cout << "Generating spectrum with detector response..." << std::endl;
std::vector<double>::iterator ys0; // ys0 pointing to the first channel
int n_old = y_separate.size();
y_separate.resize(n_old+(row.size()-1)*channel.n_channels);
ys0 = y_separate.begin() + n_old;
int i = 0;
double sigma, y_temp;
for (std::vector<double>::const_iterator ev = ev_raw.begin(), y = y_raw.begin();
ev < ev_raw.end(); ev++, y++)
{
// for separate output
if (ev - ev_raw.begin() == row[i+1])
i++;
sigma = std::sqrt(sq(response.noise/2.3548)+3.58*response.fano*(*ev));
y_temp = *y;
//apply detector filter window
if (det_window)
y_temp *= window.transmission(*ev);
#pragma omp parallel for if(MP)
for (int j = 0; j < y_binned.size(); j++)
{
double y_temp1 = 0;
double ev_ch = channel.ev_offset + j*channel.ev_gain;
//Gaussian
y_temp1 += y_temp*channel.ev_gain/(sigma*std::sqrt(2*Pi))*std::exp(-sq((*ev)-ev_ch)/(2*sq(sigma)));
//Step function
y_temp1 += response.fs*y_temp*channel.ev_gain/(2.*(*ev))*std::erfc((ev_ch-(*ev))/(std::sqrt(2)*sigma));
//Tailing function
y_temp1 += response.ft*y_temp*channel.ev_gain/(2.*response.gamma*sigma*std::exp(-1./(2*sq(response.gamma))))*std::exp((ev_ch-(*ev))/(response.gamma*sigma))*std::erfc((ev_ch-(*ev))/(std::sqrt(2)*sigma)+1/(std::sqrt(2)*response.gamma));
// Total output
y_binned[j] += y_temp1;
//Separate output
*(ys0 + i*channel.n_channels + j) += y_temp1;
}
}
}
else
{
std::cout << "Generating spectrum without detector response..." << std::endl;
std::vector<double>::iterator ys0; // ys0 pointing to the first channel
int n_old = y_separate.size();
y_separate.resize(n_old+(row.size()-1)*channel.n_channels);
ys0 = y_separate.begin() + n_old;
int i = 0;
if (det_window)//apply detector filter window
{
#pragma omp parallel for collapse(2) if(MP)
for (int i = 0; i < row.size()-1; i++)
for (int j = row[i]; j < ((row[i+1] > 0) ? row[i+1] : ev_raw.size()); j++)
{
int chn = channel.ev_to_channel(ev_raw[j]);
double y_temp = (y_raw[j])*window.transmission(ev_raw[j]);
y_binned[chn] += y_temp;
// for separate output
*(ys0 + i*channel.n_channels + chn) = y_temp;
}
}
else
channel.bin(ev_raw, y_raw, y_binned, y_separate, row);
}
}
void Detector::genspec(const double & ev_raw, const double & y_raw, std::vector<double> & y_binned, std::vector<double> & y_separate, bool det_response, bool det_window) const
{
if (y_binned.size() != channel.n_channels)
{
std::cout << "Error: Bin size does not match the number of channels!" << std::endl;
return;
}
//apply detector response
if (det_response)
{
std::cout << "Generating spectrum with detector response..." << std::endl;
std::vector<double>::iterator ys0; // ys0 pointing to the first channel
int n_old = y_separate.size();
y_separate.resize(n_old+channel.n_channels);
ys0 = y_separate.begin() + n_old;
double sigma, y_temp;
sigma = std::sqrt(sq(response.noise/2.3548)+3.58*response.fano*ev_raw);
y_temp = y_raw;
//apply detector filter window
if (det_window)
y_temp *= window.transmission(ev_raw);
#pragma omp parallel for if(MP)
for (int i = 0; i < y_binned.size(); i++)
{
double y_temp1 = 0;
double ev_ch = channel.ev_offset + i*channel.ev_gain;
//Gaussian
y_temp1 += y_temp*channel.ev_gain/(sigma*std::sqrt(2*Pi))*std::exp(-sq(ev_raw-ev_ch)/(2*sq(sigma)));
//Step function
y_temp1 += response.fs*y_temp*channel.ev_gain/(2.*ev_raw)*std::erfc((ev_ch-ev_raw)/(std::sqrt(2)*sigma));
//Tailing function
y_temp1 += response.ft*y_temp*channel.ev_gain/(2.*response.gamma*sigma*std::exp(-1./(2*sq(response.gamma))))*std::exp((ev_ch-ev_raw)/(response.gamma*sigma))*std::erfc((ev_ch-ev_raw)/(std::sqrt(2)*sigma)+1/(std::sqrt(2)*response.gamma));
// Total output
y_binned[i] += y_temp1;
//Separate output
*(ys0+i) = y_temp1;
}
}
else
{
std::cout << "Generating spectrum without detector response..." << std::endl;
std::vector<double>::iterator ys0; // ys0 pointing to the first channel
int n_old = y_separate.size();
y_separate.resize(n_old+channel.n_channels);
ys0 = y_separate.begin() + n_old;
if (det_window)//apply detector filter window
{
int chn = channel.ev_to_channel(ev_raw);
double y_temp = y_raw*window.transmission(ev_raw);
y_binned[chn] += y_temp;
*(ys0+chn) = y_temp;
}
else
channel.bin(ev_raw, y_raw, y_binned, y_separate);
}
}
void Detector::show() const
{
out(std::cout);
}
void Detector::out(std::ostream & ost) const
{
ost << "# ======================================== #" << std::endl;
ost << "# # # # # Detector parameters # # # # #" << std::endl;
channel.out(ost);
response.out(ost);
window.out(ost);
ost << "# # # # # End of detector parameters # # # # #" << std::endl;
ost << "# ======================================== #" << std::endl;
}<file_sep>/python/snr/snr.py
import numpy as np
import snip
def FWHM(x, noise=100, fano=0.114):
sigma = np.sqrt((noise / 2.3548) ** 2 + 3.58 * fano * x)
return 2.3548 * sigma
def pnb(ev, y, bg, peak_center, peak_width, bg_range=None, normalize=True):
"""
:param ev: Energy array in eV.
:param y: Total spectrum array. First dimension must be energy/channel number.
:param bg: Background spectrum array. First dimension must be energy/channel number.
:param peak_center: peak center in eV.
:param peak_width: peak width in eV.
:param bg_range: Range in which background is sampled. List with length of multiples of two.
:param normalize: Boolean. If true, background is normalized with respect to the width of bg compared to width of y.
:return: P,B in P/B.
"""
def ev2ch(x):
return np.abs(ev - x).argmin()
p = np.array([ev2ch(peak_center - peak_width / 2.), ev2ch(peak_center + peak_width / 2.)])
if bg_range is None:
b = p
else:
b = np.array([ev2ch(x) for x in bg_range])
Nt = np.sum(y[p[0]:p[1]])
B = 0
nchB = 0
for i in range(0, len(b), 2):
B += np.sum(bg[b[i]:b[i + 1]])
        nchB += b[i + 1] - b[i]  # accumulate the total background width across all ranges
if normalize:
B *= 1. * (p[1] - p[0]) / nchB
return Nt - B, B
def pnb_fixedwidth(ev, y, peak_center, peak_width, bg_range):
return pnb(ev, y, y, peak_center, peak_width, bg_range)
def pnb_snip(ev, y, peak_center, peak_width=100, FWHM=snip.FWHM, offset=0., gain=10., **kwargs):
bg = snip.snip(ev, y, FWHM, offset, gain, **kwargs)
return pnb(ev, y, bg, peak_center, peak_width)
def pnb_ascalc(ev, y, total, peak_center, peak_width):
return pnb(ev, total, total - y, peak_center, peak_width, normalize=False)
def snr(P, B):
return P/np.sqrt(P + 2*B)<file_sep>/src/cwrapper.cc
#include <iostream>
#include <fstream>
#include <string>
#include "spectrum.hpp"
#include "input.hpp"
#include "dose.hpp"
extern "C"
{
// __declspec(dllexport)
void sim(char * input_file,
char * output_file,
// Total
double * y_vec,
// Separate
double * y_sep,
// XRF
int * Z_vec,
int * row,
int * lines,
double * xrf_ev,
double * xrf_y,
// Compton
double * comp_ev,
double * comp_y,
// Rayleigh
double * ray_y,
// Detector
double * det_,
int * n_channels,
char * win_mat,
// Illumination
double * il_,
// Solid angle
double * sa,
double * dose,
int * nout);
}
void sim(char * input_file,
char * output_file,
// Total
double * y_vec,
// Separate
double * y_sep,
// XRF
int * Z_vec,
int * row,
int * lines,
double * xrf_ev,
double * xrf_y,
// Compton
double * comp_ev,
double * comp_y,
// Rayleigh
double * ray_y,
// Detector
double * det_,
int * n_channels,
char * win_mat,
// Illumination
double * il_,
// Solid angle
double * sa,
// Radiation dose
double * dose,
int * nout)
{
Sample sp;
Illumination il;
solid_angle omega;
Detector det;
// Read input file
readfile(input_file, sp, il, omega, det);
// Calculate spectrum
Spectrum spec(sp, il, omega, det, true, true, true);
Dose ds(sp, il);
// Save results to file
static std::ofstream fout;
fout.open(output_file);
//Outputs
spec.out(fout);
fout << std::endl;
ds.out(fout);
//Inputs
il.out(fout);
det.out(fout);
omega.out(fout);
sp.out(fout);
fout.close();
// Return results
// Total
if (spec.y_vec.size() > nout[0])
std::cerr << "Error: Output for y_vec out of range! ("
<< nout[0] << " given, needs "
<< spec.y_vec.size() << ".)" << std::endl;
else
for (auto i : spec.y_vec)
*(y_vec++) = i;
// *y_vec = -1;
// Separate
if (spec.y_sep.size() > (nout[1]+2)*nout[0])
std::cerr << "Error: Output for y_sep out of range! ("
<< (nout[1]+2)*nout[0] << " given, needs "
<< spec.y_sep.size() << ".)" << std::endl;
else
for (auto i : spec.y_sep)
*(y_sep++) = i;
// *y_sep = -1;
// XRF
if (spec.xrf.Z_vec.size() > nout[1])
std::cerr << "Error: Output for xrf.Z_vec out of range! ("
<< nout[1] << " given, needs "
<< spec.xrf.Z_vec.size() << ".)" << std::endl;
else
{
for (auto i : spec.xrf.Z_vec)
*(Z_vec++) = i;
// *Z_vec = -1;
for (auto i : spec.xrf.row)
*(row++) = i;
// *row = -1;
}
if (spec.xrf.lines.size() > nout[2])
std::cerr << "Error: Output for xrf.lines out of range! ("
<< nout[2] << " given, needs "
<< spec.xrf.lines.size() << ".)" << std::endl;
else
{
for (auto i : spec.xrf.lines)
*(lines++) = i;
// *lines = -1;
for (auto i : spec.xrf.ev_vec)
*(xrf_ev++) = i;
// *xrf_ev = -1;
for (auto i : spec.xrf.y_vec)
*(xrf_y++) = i;
// *xrf_y = -1;
}
// Compton
if (spec.comp.ev_vec.size() > nout[3])
std::cerr << "Error: Output for comp.ev_vec out of range! ("
<< nout[3] << " given, needs "
<< spec.comp.ev_vec.size() << ".)" << std::endl;
else
{
for (auto i : spec.comp.ev_vec)
*(comp_ev++) = i;
// *comp_ev = -1;
for (auto i : spec.comp.y_vec)
*(comp_y++) = i;
// *comp_y = -1;
}
// nout
nout[0] = spec.y_vec.size();
nout[1] = spec.xrf.Z_vec.size();
nout[2] = spec.xrf.lines.size();
nout[3] = spec.comp.ev_vec.size();
nout[4] = ds.dose_vec.size();
// Rayleigh
*ray_y = spec.ray.y;
// Detector
*(det_++) = det.channel.ev_offset;
*(det_++) = det.channel.ev_gain;
*n_channels = det.channel.n_channels;
*(det_++) = det.response.noise;
*(det_++) = det.response.fano;
*(det_++) = det.response.gamma;
*(det_++) = det.response.fs;
*(det_++) = det.response.ft;
*(det_++) = det.window.thickness;
*det_ = det.window.density;
// for (int i = 0; i < det.window.material.length(); i++)
// *(win_mat++) = int(det.window.material.at(i));
// *(win_mat) = int('\n');
for (auto i: det.window.material)
*(win_mat++) = i;
// Illumination
*(il_++) = il.ev0;
*(il_++) = il.psi;
*(il_++) = il.alpha;
*(il_++) = il.n_photons;
*(il_) = il.beam_cross_section;
// Solid angle
for (int i = 0; i < 4; i++)
*(sa++) = omega.angle_range[i];
*(sa++) = omega.theta_inc;
*(sa) = omega.beta_inc;
// Radiation dosage
for (auto i : ds.dose_vec)
*(dose++) = i;
return;
}
<file_sep>/Include/input.hpp
/*input.hpp*/
#ifndef INPUT_HPP
#define INPUT_HPP
#include "detector.hpp"
#include "geometry.hpp"
#include "sample.hpp"
void readfile(std::string filename, Sample & sp, Illumination & il, solid_angle & omega, Detector & det);
#endif<file_sep>/python/tools/xraylib.py
__author__ = 'Yue'
import ctypes
from .. import libpath
try:
lib = ctypes.cdll.LoadLibrary(libpath + '/../xraylib/Lib/linux/libxrl.so')
except OSError:
lib = ctypes.cdll.LoadLibrary(libpath + '/../xraylib/Lib/windows/libxrl-7.dll')
lib.CS_Total_CP.restype = ctypes.c_double
lib.CS_Total_CP.argtypes = [ctypes.c_char_p, ctypes.c_double]
def CS_Total_CP(CP, kev):
assert isinstance(CP, str)
return lib.CS_Total_CP(CP, kev)
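# Illustrative use of the wrapper above (the compound and energy are just an
# example, not project data): CS_Total_CP("H2O", 10.0) returns the total mass
# attenuation coefficient of water at 10 keV, in cm^2/g.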
# Declare the remaining signatures as well; without an explicit restype, ctypes
# would truncate ElementDensity's C double return value to an int.
lib.ElementDensity.restype = ctypes.c_double
lib.ElementDensity.argtypes = [ctypes.c_int]
lib.SymbolToAtomicNumber.restype = ctypes.c_int
lib.SymbolToAtomicNumber.argtypes = [ctypes.c_char_p]
def ElementDensity(*args):
    return lib.ElementDensity(*args)
def SymbolToAtomicNumber(*args):
return lib.SymbolToAtomicNumber(*args)<file_sep>/python/classes/__init__.py
__author__ = 'Yue'
__all__ = ["detector", "geometry", "spectrum"]<file_sep>/Include/math.hpp
/*math.hpp*/
#ifndef MATH_HPP
#define MATH_HPP
namespace
{
inline double sq(double x)
{
return x*x;
}
}
#endif<file_sep>/variables.mk
# # # variables used in makefile
# Makefile Paths
mkfile_path := $(shell dirname $(abspath $(lastword $(MAKEFILE_LIST))))
INCLUDE_PATH = $(abspath $(mkfile_path)/Include)
LIB_PATH = $(abspath $(mkfile_path)/Lib)
# XrayLib paths
XRL_PATH = "D:\Program Files\xraylib 64-bit"
# C++11 compiler
g++ = g++
# Others
ifeq ($(OS),Windows_NT)
#Windows stuff
# RM = del
DLIB = dll
EXE = exe
XRL_INCLUDE = $(XRL_PATH)/Include
XRL_LIB = $(XRL_PATH)/Lib
else
#Linux stuff
# RM = rm
DLIB = so
EXE = out
XRL_INCLUDE = $(XRL_PATH)/include
XRL_LIB = $(XRL_PATH)/src/.lib
endif
<file_sep>/src/sample.cpp
//sample.cpp
#include "xraylib.h"
#include "cs.hpp"
#include "constants.hpp"
#include "sample.hpp"
//Compound
Compound::Compound() : Z_vec(_Z_vec), p_vec(_p_vec), molecular_weight(_molecular_weight)
{
}
Compound::Compound(const std::vector<int> & Z_vec_, const std::vector<double> & p_vec_) : Z_vec(_Z_vec), p_vec(_p_vec), molecular_weight(_molecular_weight)
{
_Z_vec = Z_vec_;
_p_vec = p_vec_;
set_mw();
}
Compound::~Compound()
{
}
Compound & Compound::operator=(const Compound & c)
{
if (this == &c)
return *this;
_Z_vec = c.Z_vec;
_p_vec = c.p_vec;
set_mw();
return *this;
}
void Compound::set_mw()
{
_molecular_weight = 0;
for (int i=0; i<Z_vec.size(); i++)
_molecular_weight += p_vec[i]*AtomicWeight(Z_vec[i]);
}
double Compound::mac_tot(const double & ev) const
{
double mac = 0;
for (int i=0; i<Z_vec.size(); i++)
mac += CS_Total(Z_vec[i], ev/1e3)*p_vec[i]*AtomicWeight(Z_vec[i])/molecular_weight;
return mac;
}
double Compound::dmac_rayleigh_pol(const double & ev, const double & theta, const double & beta) const
{
double dmac = 0;
for (int i=0; i<Z_vec.size(); i++)
dmac += dcs_rayleigh_pol(theta, beta, ev, Z_vec[i])*p_vec[i]*N_A/molecular_weight;
return dmac;
}
double Compound::dmac_compton_pol(const double & ev, const double & theta, const double & beta) const
{
double dmac = 0;
for (int i=0; i<Z_vec.size(); i++)
dmac += dcs_compton_pol(theta, beta, ev, Z_vec[i])*p_vec[i]*N_A/molecular_weight;
return dmac;
}
void Compound::show() const
{
out(std::cout);
}
void Compound::out(std::ostream & ost) const
{
ost << "# \tZ = ";
for (auto Z : Z_vec)
ost << Z << ", ";
ost << std::endl;
ost << "# \tp = ";
for (auto p : p_vec)
ost << p << ", ";
ost << std::endl;
ost << "# \tMolecular weight = ";
ost << molecular_weight;
ost << std::endl;
}
//Monolayer
Monolayer::Monolayer() : Compound::Compound(), density(_density), thickness(_thickness), layer(_layer)
{
}
// Copy constructor: used when passing through function calls
Monolayer::Monolayer(const Monolayer & ml) : Compound::Compound(), density(_density), thickness(_thickness), layer(_layer)
{
operator=(ml);
}
Monolayer::Monolayer(const std::vector<int> & _Z_vec, const std::vector<double> & _p_vec, const double & density_, const double & thickness_, const double & layer_) : Compound::Compound(_Z_vec, _p_vec), density(_density), thickness(_thickness), layer(_layer)
{
_density = density_;
_thickness = thickness_;
_layer = layer_;
}
Monolayer::~Monolayer()
{
}
// overloaded assignment operator
Monolayer & Monolayer::operator=(const Monolayer & ml)
{
if (this == &ml)
return *this;
Compound::operator=(ml);
_density = ml.density;
_thickness = ml.thickness;
_layer = ml.layer;
return *this;
}
void Monolayer::set_layer(int i)
{
_layer = i;
}
void Monolayer::show() const
{
out(std::cout);
}
void Monolayer::out(std::ostream & ost) const
{
ost << "# Parameters for layer #" << layer << ":" << std::endl;
Compound::out(ost);
ost << "# \tDensity = " << density << std::endl;
ost << "# \tThickness = " << thickness << std::endl;
}
//Sample
Sample::Sample() : layer_vec(_layer_vec), nlayers(_nlayers)
{
update();
}
Sample::~Sample()
{
}
Sample & Sample::operator=(const Sample & s)
{
if (this == & s)
return *this;
_layer_vec = s.layer_vec;
_nlayers = s.nlayers;
return *this;
}
void Sample::update()
{
for (int i = 0; i < _layer_vec.size(); i++)
_layer_vec[i].set_layer(i);
_nlayers = _layer_vec.size();
}
void Sample::add_layer(const Monolayer & monolayer_)
{
_layer_vec.push_back(monolayer_);
if (monolayer_.layer == 0)
_layer_vec.back().set_layer(nlayers);
_nlayers++;
}
void Sample::show() const
{
out(std::cout);
}
void Sample::out(std::ostream & ost) const
{
ost << "# ======================================== #" << std::endl;
ost << "# # # # # Sample parameters: # # # # #" << std::endl;
ost << "# Total number of layers = " << nlayers << std::endl;
for (auto m : layer_vec)
m.out(ost);
ost << "# # # # # End of sample parameters # # # # #" << std::endl;
ost << "# ======================================== #" << std::endl;
}<file_sep>/src/geometry.cpp
//geometry.cpp
#include <iostream>
#include <cmath>
#include "geometry.hpp"
using std::vector;
//Illumination
Illumination::Illumination(double ev0_, double psi_, double alpha_, double n_photons_, double beam_cross_section_) : ev0(_ev0), psi(_psi), alpha(_alpha), n_photons(_n_photons), beam_cross_section(_beam_cross_section)
{
_ev0 = ev0_;
_psi = psi_;
_alpha = alpha_;
_n_photons = n_photons_;
_beam_cross_section = beam_cross_section_;
}
Illumination::~Illumination()
{
}
Illumination & Illumination::operator=(const Illumination & il)
{
if (this != &il)
{
_ev0 = il.ev0;
_psi = il.psi;
_alpha = il.alpha;
_n_photons = il.n_photons;
_beam_cross_section = il.beam_cross_section;
}
return *this;
}
void Illumination::show() const
{
out(std::cout);
}
void Illumination::out(std::ostream & ost) const
{
ost << "# ======================================== #" << std::endl;
ost << "# # # # # Illumination: # # # # #" << std::endl;
ost << "# ev0 = " << ev0 << std::endl;
ost << "# psi = " << psi << std::endl;
ost << "# alpha = " << alpha << std::endl;
ost << "# Number of incident photons = " << n_photons << std::endl;
ost << "# Beam cross sectional area = " << beam_cross_section << " cm^2" << std::endl;
ost << "# # # # # End of illumination # # # # #" << std::endl;
ost << "# ======================================== #" << std::endl;
}
void spherical2cartesian(const double & r, const double & theta, const double & beta, double & x, double & y, double & z) //Convert spherical coordinates (r,theta,beta) to Cartesian coordinates (x,y,z).
{
using std::sin;
using std::cos;
x = r*sin(theta)*cos(beta);
y = r*sin(theta)*sin(beta);
z = r*cos(theta);
}
double arctan2(const double & x) //like arctan, but the returned value is in [0, Pi] rather than (-Pi/2, Pi/2)
{
using std::atan;
double rad = atan(x);
if (rad<0)
rad += Pi;
return rad;
}
void rotationy(const double & rad, double & x, double & y, double & z)
//rotate the axes about original y axis, angle = rad.
{
if (rad)
{
using std::cos;
using std::sin;
double x1;
x1 = cos(rad)*x - sin(rad)*z;
z = sin(rad)*x + cos(rad)*z;
x = x1;
}
}
void rotationx(const double & rad, double & x, double & y, double & z)
//rotate the axes about original x axis, angle = rad.
{
if (rad)
{
using std::cos;
using std::sin;
double y1;
y1 = cos(rad)*y + sin(rad)*z;
z = -sin(rad)*y + cos(rad)*z;
y = y1;
}
}
double Illumination::psi_prime(const double & theta, const double & beta) const
{
using std::sqrt;
double theta0 = Pi/2.+psi;
double x, y, z;
spherical2cartesian(1, theta, beta, x, y, z);
rotationy(theta0, x, y, z);
rotationx(alpha, x, y, z);
double theta_prime = arctan2(sqrt(x*x+y*y)/(1.*z));
return Pi/2.-theta_prime;
}
// solid_angle
solid_angle::solid_angle():theta(_theta), beta(_beta), subtend(_subtend), theta_inc(_theta_inc), beta_inc(_beta_inc)//, angle_range(_angle_range)
{
_angle_range[0] = 0;
_angle_range[1] = Pi;
_angle_range[2] = 0;
_angle_range[3] = Pi/2;
_theta_inc = Pi/180;
_beta_inc = Pi/180;
update();
}
solid_angle::solid_angle(const double *ar, double th_inc, double be_inc):theta(_theta), beta(_beta), subtend(_subtend), theta_inc(_theta_inc), beta_inc(_beta_inc)//, angle_range(_angle_range)
{
for (int i=0; i<4; i++)
_angle_range[i] = ar[i];
_theta_inc = th_inc;
_beta_inc = be_inc;
update();
}
solid_angle::~solid_angle()
{
}
solid_angle & solid_angle::operator=(const solid_angle & sa)
{
if (this == &sa)
return *this;
for (int i=0; i<4; i++)
_angle_range[i] = sa.angle_range[i];
_theta_inc = sa._theta_inc;
_beta_inc = sa._beta_inc;
update();
return *this;
}
double solid_angle::domega(const double & theta) const
{
return std::sin(theta)*_theta_inc*_beta_inc;
}
void solid_angle::update()
{
_theta.clear();
for (double x = angle_range[0]; x <= angle_range[1]; x += _theta_inc)
_theta.push_back(x);
_beta.clear();
for (double x = angle_range[2]; x <= angle_range[3]; x += _beta_inc)
_beta.push_back(x);
_subtend = (std::cos(angle_range[0])-std::cos(angle_range[1]))*(angle_range[3]-angle_range[2]);
}
// const vector<double> & solid_angle::get_theta(double th_inc)
// {
// if ((th_inc > 0) && (th_inc != theta_inc))
// {
// theta_inc = th_inc;
// _theta.clear();
// for (double x = angle_range[0]; x <= angle_range[1]; x += theta_inc)
// _theta.push_back(x);
// }
// else if (theta.empty())
// for (double x = angle_range[2]; x <= angle_range[3]; x += theta_inc)
// _theta.push_back(x);
// return theta;
// }
// const vector<double> & solid_angle::get_beta(double be_inc)
// {
// if ((be_inc > 0) && (be_inc != beta_inc))
// {
// beta_inc = be_inc;
// _beta.clear();
// for (double x = angle_range[2]; x <= angle_range[3]; x += beta_inc)
// _beta.push_back(x);
// }
// else if (beta.empty())
// for (double x = angle_range[2]; x <= angle_range[3]; x += beta_inc)
// _beta.push_back(x);
// return beta;
// }
void solid_angle::show() const
{
out(std::cout);
}
void solid_angle::out(std::ostream & ost) const
{
ost << "# ======================================== #" << std::endl;
ost << "# # # # # Solid angle definition # # # # #" << std::endl;
ost << "# angle range: ";
for (int i = 0; i < 4; i++)
ost << angle_range[i] << ' ';
ost << std::endl;
ost << "# theta_inc = " << _theta_inc << std::endl;
ost << "# beta_inc = " << _beta_inc << std::endl;
ost << "# theta: ";
for (vector<double>::const_iterator i = theta.begin(); i < theta.end(); i++)
ost << *i << ' ';
ost << std::endl;
ost << "# beta: ";
for (vector<double>::const_iterator i = beta.begin(); i < beta.end(); i++)
ost << *i << ' ';
ost << std::endl;
ost << "# Total subtended solid angle: " << subtend << std::endl;
ost << "# # # # # End of solid angle definition # # # # #" << std::endl;
ost << "# ======================================== #" << std::endl;
}
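// Descriptive note (added): the helpers below compute attenuation factors for a single
// layer. atten_mono() integrates the attenuation of the incident beam (energy ev0,
// grazing angle psi) and of the emitted/scattered radiation (energy ev, take-off angle
// psiprime) over the layer depth; psiprime > 0 corresponds to reflection geometry and
// psiprime < 0 to transmission geometry. atten_refl(), atten_trans_in() and
// atten_trans_out() give the plain exponential attenuation caused by layers above or
// below the emitting layer.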
double atten_mono(const double & ev0,
const double & ev,
const double & psi,
const double & psiprime,
const Monolayer & ml)
{
if ((psiprime < 1e-6) && (psiprime > -1e-6)) // Check if psiprime == 0
return 0;
double t = ml.thickness;
double rho = ml.density;
if (t == 0 || rho == 0)
return 1;
double mac0 = ml.mac_tot(ev0);
double mac1 = ml.mac_tot(ev);
double sp0 = std::sin(psi);
double sp1 = std::sin(psiprime);
double temp = (mac0/sp0+mac1/sp1)*rho;
if (mac0+mac1*sp0/sp1 < 1e-50 && mac0+mac1*sp0/sp1 > -1e-50) // To avoid division by zero
return 0;
if (psiprime > 0)
return (1-std::exp(-(mac0/sp0+mac1/sp1)*rho*t))/(mac0+mac1*sp0/sp1);
else
return (std::exp(mac1*rho*t/sp1)-std::exp(-mac0*rho*t/sp0))/(mac0+mac1*sp0/sp1);
}
double atten_refl(const double & ev0,
const double & ev,
const double & psi,
const double & psiprime,
const Monolayer & ml)
{
double t = ml.thickness;
double rho = ml.density;
if (t == 0 || rho == 0)
return 1;
else if (psi == 0 || psiprime == 0)
return 0;
return std::exp(-(ml.mac_tot(ev0)/std::sin(psi)+ml.mac_tot(ev)/std::sin(psiprime))*ml.density*ml.thickness);
}
double atten_trans_in(const double & ev0,
const double & psi,
const Monolayer & ml)
{
if (ml.density == 0 || ml.thickness == 0)
return 1;
else if (psi == 0)
return 0;
return std::exp(-ml.mac_tot(ev0)/std::sin(psi)*ml.density*ml.thickness);
}
double atten_trans_out(const double & ev,
const double & psiprime,
const Monolayer & ml)
{
if (ml.density == 0 || ml.thickness == 0)
return 1;
else if (psiprime == 0)
return 0;
return std::exp(ml.mac_tot(ev)/std::sin(psiprime)*ml.density*ml.thickness);
}<file_sep>/read.py
import numpy as np
import matplotlib.pyplot as plt
def read(fname):
# Read channel values
    a = np.genfromtxt(fname, invalid_raise=False, skip_header=0)
# Read detector configurations and line labels
labels = ['Total']
f = open(fname)
for line in f.readlines():
if 'ev_offset' in line:
ev_offset = float(line.split('=')[1])
elif 'ev_gain' in line:
ev_gain = float(line.split('=')[1])
elif 'n_channels' in line:
n_channels = float(line.split('=')[1])
elif ('Z =' in line) and ('(' in line):
labels.append(line.split('(')[-1].strip().strip(')'))
f.close()
labels += ['Rayleigh', 'Compton']
ev = ev_offset + ev_gain * np.arange(n_channels)
return ev, a, labels
def plot(ev, a, labels, xlim=None, ylim=None, show=True):
ax = plt.subplot(111)
plt.plot(ev / 1e3, a.T)
plt.yscale('log')
plt.ylim(ylim)
plt.xlim(xlim)
    plt.xlabel('E (keV)')
plt.ylabel(r'$I(E)/I_0(E_0)$')
# Shrink current axis by 20%
box = ax.get_position()
ax.set_position([box.x0, box.y0, box.width * 0.8, box.height])
plt.legend(labels, loc='upper right', ncol=1, bbox_to_anchor=(1.35, 1), borderaxespad=0.)
if show:
plt.show()
def dose(fname):
f = open(fname)
dose = []
for line in f.readlines():
if ('Layer' in line) and ('Gy' in line):
dose.append(float(line.split(':')[-1].split()[0]))
f.close()
return np.array(dose)
if __name__ == '__main__':
import sys
from python import plt_kwargs
try:
fname = sys.argv[1]
except IndexError:
fname = 'out.txt'
# Read data file
ev, a, labels = read(fname)
# Plot spectrum
plt.figure()
plt.title('Spectrum read from %s' % fname)
plot(ev, a, labels, xlim=plt_kwargs['xlim'], ylim=plt_kwargs['ylim'])<file_sep>/src/xrf.cpp
/* xrf.cpp */
#include <iostream>
#include <vector>
#include "xraylib.h"
#define MP 1 // OpenMP
using namespace std;
#define K_LINES -29
#define L_LINES -113
#define M_LINES -219
#define N_LINES -324
#define O_LINES -374
#define P_LINES -383
vector<int> mac_xrf(double ev0, int Z, vector<double> & ev_vec, vector<double> & y_vec, double weight = 1.0, int line_end = P_LINES)
{
vector<int> lines;
int line_start;
if (ev0 >= 1e3*EdgeEnergy(Z, K_SHELL))
line_start = -1;
else if (line_end <= L_LINES && ev0 >= 1e3*EdgeEnergy(Z, L3_SHELL))
line_start = K_LINES-1;
else if (line_end <= M_LINES && ev0 >= 1e3*EdgeEnergy(Z, M5_SHELL))
line_start = L_LINES-1;
else if (line_end <= N_LINES && ev0 >= 1e3*EdgeEnergy(Z, N7_SHELL))
line_start = M_LINES-1;
else if (line_end <= O_LINES && ev0 >= 1e3*EdgeEnergy(Z, O7_SHELL))
line_start = N_LINES-1;
else if (line_end <= P_LINES && ev0 >= 1e3*EdgeEnergy(Z, P5_SHELL))
		line_start = O_LINES-1; // P lines start immediately after the O lines
else
{
cout << "XRF lines not found for Z = " << Z << " @ " << ev0 << " eV!" << endl;
return lines;
}
#pragma omp parallel for ordered if(MP)
for (int line = line_start; line >= line_end; line--)
{
double ev = LineEnergy(Z, line)*1e3;
if (ev > 1e-6)
{
double y = CS_FluorLine_Kissel_Cascade(Z, line, ev0/1e3);
if (y > 1e-50)
{
#pragma omp ordered
{
ev_vec.push_back(ev);
y_vec.push_back(y*weight);
lines.push_back(line);
}
}
}
}
return lines;
}
<file_sep>/python/tools/savespec.py
#savespec
import h5py
import os
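# Note (added): `flatten` is used below but was not defined in this module as committed.
# The minimal helper here is an assumption about its intended behaviour: it simply
# flattens arbitrarily nested lists/tuples of spectrum objects into a flat list.
def flatten(x):
    out = []
    for item in x:
        if isinstance(item, (list, tuple)):
            out.extend(flatten(item))
        else:
            out.append(item)
    return out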
def dim_scale(f,dset):
ndims = len(dset.dims)
dset.dims[ndims-2].label = 'theta'
dset.dims[ndims-1].label = 'beta'
dset.dims.create_scale(f['omega/theta'])
dset.dims.create_scale(f['omega/theta_rad'])
dset.dims.create_scale(f['omega/beta'])
dset.dims.create_scale(f['omega/beta_rad'])
dset.dims[ndims-2].attach_scale(f['omega/theta'])
dset.dims[ndims-2].attach_scale(f['omega/theta_rad'])
dset.dims[ndims-1].attach_scale(f['omega/beta'])
dset.dims[ndims-1].attach_scale(f['omega/beta_rad'])
def savespec(rawspec, filename = 'specdata.h5', dir = os.getcwd()):
if not os.path.exists(dir):
os.mkdir(dir)
rawspec_arr = flatten([rawspec])
try:
        f = h5py.File(os.path.join(dir, filename), 'w')
f['omega/theta'] = rawspec_arr[0].omega.theta_arr
f['omega/theta_rad'] = rawspec_arr[0].omega.theta_rad_arr
f['omega/beta'] = rawspec_arr[0].omega.beta_arr
f['omega/beta_rad'] = rawspec_arr[0].omega.beta_rad_arr
f['omega/range'] = rawspec_arr[0].omega.range
f['omega/n_theta'] = rawspec_arr[0].omega.n_theta
f['omega/n_beta'] = rawspec_arr[0].omega.n_beta
for rawspec in rawspec_arr:
group = f.require_group(rawspec.stype+'/'+rawspec.name)
# try:
# group = f[rawspec.stype+'/'+rawspec.name]
# except KeyError:
# group = f.create_group(rawspec.stype+'/'+rawspec.name)
dset_list = rawspec.__dict__.keys()
dset_list.remove('stype')
dset_list.remove('name')
dset_list.remove('omega')
for dset in dset_list:
try:
group[dset] = rawspec.__getattribute__(dset)
dim_scale(f,group[dset])
except TypeError:
pass
finally:
f.close()
def readspec(filename = 'specdata.h5', dir = os.getcwd()):
try:
        f = h5py.File(os.path.join(dir, filename), 'r')
out = []
omega = solid_angle(angle_range=f['omega/range'].value,n_theta = f['omega/n_theta'].value,n_beta = f['omega/n_beta'].value)
stype_list = f.keys()
stype_list.remove('omega')
for stype in stype_list[::-1]:
for name in f[stype]:
group = f[stype][name]
out.append(spectrum(ev_mat = group['ev_mat'], intensity_mat = group['intensity_mat'], omega = omega, name = name, stype = stype))
return out
finally:
f.close()<file_sep>/src/makefile
# makefile
include ../variables.mk
SRC_DIR = "."
OBJ_DIR = "../obj"
SRC_FILES = $(wildcard *.cpp)
OBJ_FILES = $(SRC_FILES:.cpp=.o)
all: $(OBJ_FILES) cwrapper.o $(LIB_PATH)
ar crf $(LIB_PATH)/libsim.a $(OBJ_FILES)
$(g++) $(OBJ_FILES) cwrapper.o -L$(XRL_LIB) -lxrl -shared -fPIC -o $(LIB_PATH)/libsim.$(DLIB) -fopenmp
# $(g++) cwrapper.o $(LIB_PATH)/libsim.a -L$(XRL_LIB) -lxrl -shared -fPIC -o $(LIB_PATH)/libsim.$(DLIB) -fopenmp # works for mingw-w64 on windows, not for linux.
%.o: %.cpp
@echo "============="
@echo "Compiling $<"
@$(g++) -c $< -fPIC -o $@ -I$(XRL_INCLUDE) -I$(INCLUDE_PATH) -std=c++11 -fopenmp
cwrapper.o: cwrapper.cc
@echo "============="
@echo "Compiling $<"
@$(g++) -c $< -fPIC -o $@ -I$(XRL_INCLUDE) -I$(INCLUDE_PATH) -std=c++11 -fopenmp
$(LIB_PATH):
- mkdir $(LIB_PATH)
clean:
$(RM) -rvf *.o $(LIB_PATH)/*.$(DLIB) $(LIB_PATH)/*.a all<file_sep>/python/classes/detector.py
# detector.py
import numpy as np
try:
import xraylib as xrl
except ImportError:
from ..tools import xraylib as xrl
def energy_to_channel(energy, offset=0., gain=10.):
"""
Convert energy to channel number.
:param energy:
The energy in eV.
:param offset:
Energy offset in eV. Or the energy of channel 0.
:param gain:
Energy gain in eV. The increment of energy in eV from channel to channel.
:return:
Channel number.
"""
return (energy - offset) // gain
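# Illustrative example (added): with the defaults offset=0 eV and gain=10 eV/channel,
# energy_to_channel(6400.) == 640.0, i.e. a 6.4 keV photon falls into channel 640.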
class Channel(object):
"""
Class for detector channels.
Attributes:
ev_offset: Energy offset in eV.
ev_gain: Channel gain in eV.
n_channels: Total number of channels.
"""
def __init__(self, ev_offset=0, ev_gain=10, n_channels=2048):
self.ev_offset = ev_offset
self.ev_gain = ev_gain
self.n_channels = n_channels
self.ev_arr = np.arange(ev_offset, ev_offset + ev_gain * n_channels, ev_gain)
def channel(self, ev):
"""
Convert energy in eV to channel number.
:param ev:
The energy in eV.
:return:
Channel number
"""
return energy_to_channel(ev, self.ev_offset, self.ev_gain)
class Response(object):
"""
Class for detector response.
Attributes:
noise:
fano:
fs:
ft:
ev_gain:
"""
def __init__(self,
noise=100,
fano=0.114,
gamma=2.5,
fs=0.03,
ft=0.02,
ev_gain=None):
self.noise = noise
self.fano = fano
self.gamma = gamma
self.fs = fs
self.ft = ft
self.ev_gain = ev_gain
def FWHM(self, ev, **kwargs):
"""
FWHM function (of energy in eV).
:rtype : float
:param ev: Energy in eV.
:param kwargs:
:return:
FWHM in eV.
"""
noise = kwargs.get('noise', self.noise)
fano = kwargs.get('fano', self.fano)
sigma = np.sqrt((noise / 2.3548) ** 2 + 3.58 * fano * ev)
return 2.3548 * sigma
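# Illustrative example (added): with the default noise=100 eV and fano=0.114,
# Response().FWHM(5.9e3) evaluates to roughly 153 eV at the Mn K-alpha energy,
# following the sigma = sqrt((noise/2.3548)**2 + 3.58*fano*ev) model above.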
class Window(object):
"""
Class for detector window.
:Attributes:
material:
String. Material of the filtering window on the detector.
thickness:
Thickness of the filtering window material. In cm.
density:
Mass density of the filtering window material. In g/cm^3.
"""
def __init__(self,
material='Be',
thickness=24e-4,
density=None):
self.material = material
self.thickness = thickness
if density is None:
density = xrl.ElementDensity(xrl.SymbolToAtomicNumber(material))
if not density:
density = 1
self.density = density
def transmission(self, ev):
"""
The transmission function of the window.
:param ev:
The energy in eV of the incident beam.
:return:
The ratio of the intensity of the transmitted beam through the window vs that of the incident.
"""
_mac = xrl.CS_Total_CP(self.material, ev / 1000.)
# _mac = compound(CP=self.material).mac_total(ev)
return np.exp(-_mac * self.density * self.thickness)
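# Illustrative example (added): Window().transmission(5.9e3) returns the fraction of
# 5.9 keV photons transmitted through the default 24 um Be window (a value between 0
# and 1), computed as exp(-(mu/rho) * rho * t) with mu/rho taken from xraylib.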
class Detector(object):
"""
Class for detector.
:Attributes:
channel: A detector channel object.
response: A detector response object.
window: A detector window object.
"""
def __init__(self, channel=Channel(), response=Response(), window=Window()):
self.channel = channel
self.response = response
response.ev_gain = channel.ev_gain
self.window = window<file_sep>/python/tools/elementlookup.py
from . import __path__ as datapath
datapath = datapath[0]
def lookup(x):
"""
:param x:
:return:
"""
f = open(datapath + '/periodictable.txt', 'r') # open periodic table file
data = f.readlines()
f.close()
# dictionaries
d = {}
ivd1 = {}
ivd2 = {}
# create dictionaries from data in the file
for line in data:
Z, symbol, name = (s.strip() for s in line.split('-'))
Z = int(Z)
d[Z] = symbol, name
ivd1[symbol] = Z, name
ivd2[name] = Z, symbol
# lookup in the dictionaries
try:
x = x.title()
try:
return ivd1[x]
except KeyError:
return ivd2[x]
except AttributeError:
return d[x]
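# Illustrative examples (added), assuming periodictable.txt stores "Z - Symbol - Name"
# entries as parsed above:
#   lookup(26)     -> ('Fe', 'Iron')
#   lookup('Fe')   -> (26, 'Iron')
#   lookup('Iron') -> (26, 'Fe')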
def symbol2number(Z):
if type(Z) == str:
try:
Z = int(Z)
except ValueError:
return lookup(Z)[0]
return Z
def number2symbol(Z):
if type(Z) == str:
try:
Z = int(Z)
except ValueError:
return Z
return lookup(Z)[0]
<file_sep>/makefile
# makefile
include variables.mk
all: main.o
$(MAKE) -C ./src all
$(g++) main.o -L./Lib -lsim -L$(XRL_LIB) -lxrl -o main.$(EXE) -fopenmp #-static
echo export LD_LIBRARY_PATH="$(LIB_PATH)" > init.sh
echo export OMP_NUM_THREADS=16 >> init.sh
echo export PYTHONPATH="$(mkfile_path):\$$PYTHONPATH" >> init.sh
main.o: main.cpp
$(g++) -c main.cpp -I$(INCLUDE_PATH) -I$(XRL_INCLUDE) -std=c++11 -fopenmp
clean:
$(MAKE) -C ./src clean
	$(RM) -rvf *.o main.$(EXE) $(LIB_PATH)/*.a $(LIB_PATH)/*.$(DLIB) $(LIB_PATH) all
<file_sep>/main.cpp
#include <iostream>
#include <fstream>
#include <string>
#include "spectrum.hpp"
#include "dose.hpp"
#include "input.hpp"
int main(int argc, char ** argv)
{
std::string input_file = "input.txt", output_file = "out.txt";
if (argc > 2)
{
output_file = argv[2];
input_file = argv[1];
}
else if (argc > 1)
input_file = argv[1];
Sample sp;
Illumination il;
solid_angle omega;
Detector det;
// Read input file
std::cout << "Reading file \"" << input_file << "\"..." << std::endl;
readfile(input_file, sp, il, omega, det);
// Show configurations
sp.show();
omega.show();
det.show();
// Calculate spectrum
std::cout << "Calculating spectrum..." << std::endl;
Spectrum spec(sp, il, omega, det, true, true, true);
// Calculate dose
Dose ds(sp, il);
// Show results on screen
spec.show();
// Save results to file
std::cout << "Writing output into \"" << output_file << "\"..." << std::endl;
static std::ofstream fout;
fout.open(output_file);
// Outputs
spec.out(fout);
fout << std::endl;
ds.out(fout);
//Inputs
il.out(fout);
det.out(fout);
omega.out(fout);
sp.out(fout);
std::cout << "Done!" << std::endl;
return 0;
}<file_sep>/Include/sample.hpp
//sample.hpp
#ifndef SAMPLE_HPP
#define SAMPLE_HPP
#include <vector>
#include <iostream>
class Compound
{
private:
std::vector<int> _Z_vec;
std::vector<double> _p_vec;
double _molecular_weight;
void set_mw();
public:
Compound();
Compound(const std::vector<int> & Z_vec_, const std::vector<double> & p_vec_);
~Compound();
Compound & operator=(const Compound & c);
const std::vector<int> & Z_vec;
const std::vector<double> & p_vec;
const double & molecular_weight;
double mac_tot(const double & ev) const;
double dmac_rayleigh_pol(const double & ev, const double & theta, const double & beta) const;
double dmac_compton_pol(const double & ev, const double & theta, const double & beta) const;
void show() const;
void out(std::ostream & ost) const;
};
class Monolayer : public Compound
{
private:
double _density;
double _thickness;
double _layer;
public:
Monolayer();
Monolayer(const Monolayer & ml);
Monolayer(const std::vector<int> & _Z_vec, const std::vector<double> & _p_vec, const double & density_, const double & thickness_, const double & layer_ = 0);
~Monolayer();
Monolayer & operator=(const Monolayer & ml);
const double & density;
const double & thickness;
const double & layer;
void set_layer(int i);
void show() const;
void out(std::ostream & ost) const;
};
class Sample
{
private:
std::vector<Monolayer> _layer_vec;
int _nlayers;
void update();
public:
Sample();//const std::vector<Monolayer> & layer_vec_);
~Sample();
Sample & operator=(const Sample & s);
const std::vector<Monolayer> & layer_vec;
const int & nlayers;
void add_layer(const Monolayer & monolayer_);
void show() const;
void out(std::ostream & ost) const;
};
#endif<file_sep>/python/classes/spectrum.py
# spectrum.py
import matplotlib.pyplot as plt
from .detector import *
from .geometry import *
# from matplotlib.colors import LogNorm
# from matplotlib.ticker import LogFormatterMathtext
# # # #fonts# # #
# import matplotlib
# from matplotlib import rc
# matplotlib.rcParams['pdf.fonttype'] = 'truetype'
# fontProperties = {'family':'serif','serif':['Arial'],
# 'weight' : 'normal', 'size' : '12'}
# rc('font',**fontProperties)
# # # #
class Xrf(object):
def __init__(self, y_mat=None,
y_vec=None, ev_vec=None, lines=None, Z_vec=None, row=None):
"""
:param y_mat:
:param y_vec:
:param ev_vec:
:param lines:
:param Z_vec:
:param row:
"""
self.y_mat = y_mat
self.y_vec = y_vec
self.ev_vec = ev_vec
self.lines = lines
self.Z_vec = Z_vec
self.row = row
class Rayleigh(object):
def __init__(self, y_mat=None,
y=None, ev0=None):
self.y_mat = y_mat
self.y = y
self.ev0 = ev0
class Compton(object):
def __init__(self, y_mat=None,
y_vec=None, ev_vec=None):
self.y_mat = y_mat
self.y_vec = y_vec
self.ev_vec = ev_vec
class Spectrum(object):
def __init__(self,
y_vec=None,
y_sep=None,
                 labels=None,
xrf=Xrf(),
ray=Rayleigh(),
comp=Compton(),
det=Detector(),
il=Illumination(),
omega=SolidAngle()):
self.y_vec = y_vec
self.y_sep = y_sep
        self.labels = labels if labels is not None else []
self.xrf = xrf
self.rayleigh = ray
self.compton = comp
self.detector = det
self.illumination = il
self.omega = omega
def show(self, **kwargs):
xlim = kwargs.get('xlim')
show_total = kwargs.get('show_total', True)
logy = kwargs.get('logy', True)
show = kwargs.get('show', True)
yshift = kwargs.get('yshift', 0)
ax = plt.subplot(111)
plt.plot(self.detector.channel.ev_arr / 1.e3, self.y_sep.T + yshift)
        labels = list(self.labels)  # copy so appending 'Total' below does not mutate self.labels
if show_total:
plt.plot(self.detector.channel.ev_arr / 1.e3, self.y_vec + yshift, linestyle='--')
# plt.fill_between(x.ev_arr[845:880]/1.e3,1e-18,total[845:880],color = 'black',alpha = 0.15)
labels += ['Total']
ymax = self.y_vec.max()
ylim = kwargs.get('ylim', [1e-18, 5 * ymax])
# Shrink current axis by 20%
box = ax.get_position()
ax.set_position([box.x0, box.y0, box.width * 0.8, box.height])
# plt.legend(loc=0,ncol=3)
        plt.legend(labels, ncol=1, bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
if logy:
plt.yscale('log')
plt.xlim(xlim)
plt.ylim(ylim)
        plt.xlabel('E (keV)')
plt.ylabel(r'$I(E)/I_0(E_0)$')
if show:
plt.show()
def save_npz(self, filename='spec.npz'):
np.savez(filename, ev_arr=self.detector.channel.ev_arr, y_vec=self.y_vec, y_sep=self.y_sep)
def save_hdf(self, filename='spec.hdf5'):
pass
<file_sep>/python/pyapi.py
# pyapi.py
import ctypes
import os
from .tools.elementlookup import number2symbol  # Must be imported before loading the DLL.
from . import libpath
try:
lib = ctypes.cdll.LoadLibrary(libpath + '/libsim.so')
except OSError:
lib = ctypes.cdll.LoadLibrary(libpath + '/libsim.dll')
from .classes.spectrum import *
from . import config
lib.sim.restype = None
lib.sim.argtypes = [ctypes.c_char_p,
ctypes.c_char_p,
ctypes.POINTER(ctypes.c_double),
ctypes.POINTER(ctypes.c_double),
ctypes.POINTER(ctypes.c_int),
ctypes.POINTER(ctypes.c_int),
ctypes.POINTER(ctypes.c_int),
ctypes.POINTER(ctypes.c_double),
ctypes.POINTER(ctypes.c_double),
ctypes.POINTER(ctypes.c_double),
ctypes.POINTER(ctypes.c_double),
ctypes.POINTER(ctypes.c_double),
ctypes.POINTER(ctypes.c_double),
ctypes.POINTER(ctypes.c_int),
ctypes.c_char_p,
ctypes.POINTER(ctypes.c_double),
ctypes.POINTER(ctypes.c_double),
ctypes.POINTER(ctypes.c_double),
ctypes.POINTER(ctypes.c_int)]
def sim(input_file,
output_file,
y_vec,
y_sep,
Z_vec,
row,
lines,
xrf_ev,
xrf_y,
comp_ev,
comp_y,
ray_y,
det,
n_channels,
win_mat,
il,
sa,
dose,
nout):
if not os.path.isfile(input_file):
raise IOError("File %s does not exit!" % input_file)
lib.sim(ctypes.c_char_p(input_file),
ctypes.c_char_p(output_file),
y_vec.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
y_sep.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
Z_vec.ctypes.data_as(ctypes.POINTER(ctypes.c_int)),
row.ctypes.data_as(ctypes.POINTER(ctypes.c_int)),
lines.ctypes.data_as(ctypes.POINTER(ctypes.c_int)),
xrf_ev.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
xrf_y.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
comp_ev.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
comp_y.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
ray_y.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
det.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
n_channels.ctypes.data_as(ctypes.POINTER(ctypes.c_int)),
win_mat,
il.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
sa.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
dose.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
nout.ctypes.data_as(ctypes.POINTER(ctypes.c_int)))
def calc(input_file="input.txt",
output_file="output.txt",
nout=config['nout'], return_dose=False): # N of channels, N of Z, N of lines, N of thetas, N of layers
y_vec = np.zeros(nout[0])
y_sep = np.zeros(nout[0] * (nout[1] + 2))
Z_vec = np.zeros(nout[1], dtype=np.dtype(ctypes.c_int))
row = np.zeros(nout[1] + 2, dtype=np.dtype(ctypes.c_int))
lines = np.zeros(nout[2], dtype=np.dtype(ctypes.c_int))
xrf_ev = np.zeros(nout[2])
xrf_y = np.zeros(nout[2])
comp_ev = np.zeros(nout[3])
comp_y = np.zeros(nout[3])
ray_y = np.zeros(1)
det = np.zeros(9)
n_channels = np.zeros(1, dtype=np.dtype(ctypes.c_int))
win_mat = ctypes.create_string_buffer(20)
il = np.zeros(5)
sa = np.zeros(6)
dose = np.zeros(nout[4])
nout = np.array(nout, dtype=np.dtype(ctypes.c_int))
_nout = nout.copy()
sim(input_file,
output_file,
y_vec,
y_sep,
Z_vec,
row,
lines,
xrf_ev,
xrf_y,
comp_ev,
comp_y,
ray_y,
det,
n_channels,
win_mat,
il,
sa,
dose,
nout)
# check out-of-range errors
if (nout > _nout).any():
raise IOError("Output out of range! \nGiven: " + str(_nout) + "\nNeeds: " + str(nout))
xrf = Xrf(None, xrf_y[:nout[2]], xrf_ev[:nout[2]], lines[:nout[2]], Z_vec[:nout[1]], row[:nout[1] + 1])
comp = Compton(None, comp_y[:nout[3]], comp_ev[:nout[3]])
ray = Rayleigh(None, ray_y, il[0])
_det = Detector(Channel(*det[:2], n_channels=n_channels),
Response(*det[2:-2], ev_gain=det[1]),
Window(win_mat, *det[-2:]))
_il = Illumination(*il)
omega = SolidAngle(sa[:4], *sa[-2:])
# labels = Z_vec[:nout[1]].tolist()
labels = [number2symbol(Z) for Z in Z_vec[:nout[1]]]
labels.append('Rayleigh')
labels.append('Compton')
if return_dose:
return Spectrum(y_vec[:n_channels], y_sep[:n_channels * (nout[1] + 2)].reshape(-1, n_channels), labels, xrf,
                        ray, comp, _det, _il, omega), dose[:nout[4]]
else:
return Spectrum(y_vec[:n_channels], y_sep[:n_channels * (nout[1] + 2)].reshape(-1, n_channels), labels, xrf,
ray, comp, _det, _il, omega)
# print y_vec
# print Z_vec
# print row
# print lines
# print xrf_ev
# print xrf_y
# print comp_ev
# print comp_y
# print ray_y
# print det
# print n_channels
# print win_mat
# print il
# print sa
# print nout
# spec = calc()
# print spec.detector.window.material
# print spec.labels
# print spec.y_sep[0]<file_sep>/python/classes/geometry.py
# geometry.py
import numpy as np
class SolidAngle(object):
def __init__(self, angle_range=[0, np.pi, -np.pi / 2, np.pi / 2], theta_inc=np.pi / 180, beta_inc=np.pi / 180,
unit="rad"):
"""
:param angle_range:
:param theta_inc:
:param beta_inc:
:param unit:
"""
if unit == "deg":
angle_range = np.radians(angle_range)
theta_inc = np.radians(theta_inc)
beta_inc = np.radians(beta_inc)
self.range = angle_range
self.theta_inc = theta_inc
self.beta_inc = beta_inc
self.theta_arr = np.arange(angle_range[0], angle_range[1], self.theta_inc)
self.beta_arr = np.arange(angle_range[2], angle_range[3], self.beta_inc)
self.subtend_calc = np.sum(np.sin(self.theta_arr) * self.theta_inc * self.beta_inc) * self.beta_arr.size
self.subtend = (np.cos(self.theta_arr[0]) - np.cos(self.theta_arr[-1])) * (self.beta_arr[-1] - self.beta_arr[0])
def shift(self, new_center):
pass
def slice(self, angle_range=[0, np.pi, -np.pi / 2, np.pi / 2]):
"""
:rtype : 2D array.
:param angle_range:
:return:
"""
theta_slice = np.logical_and(self.theta_arr >= angle_range[0], self.theta_arr <= angle_range[1])
beta_slice = np.logical_and(self.beta_arr >= angle_range[2], self.beta_arr <= angle_range[3])
omega_slice = np.multiply(*np.meshgrid(theta_slice, beta_slice))
return omega_slice
def domega(self, theta_rad):
return (np.cos(theta_rad - self.theta_inc / 2.) - np.cos(theta_rad + self.theta_inc / 2.)) * self.beta_inc
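# Illustrative note (added): the default SolidAngle() covers theta in [0, pi) and beta
# in [-pi/2, pi/2) on a 1-degree grid, i.e. roughly half of the full sphere, so its
# .subtend attribute is approximately 2*pi steradians.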
class Illumination(object):
def __init__(self, ev0=1e4, psi=0, alpha=0, n_photons=1, beam_cross_section=1e-12*np.pi):
self.ev0 = ev0
self.psi = psi
self.alpha = alpha
self.n_photons = n_photons
self.beam_cross_section = beam_cross_section
<file_sep>/Include/constants.hpp
//constants.hpp
#ifndef CONSTANTS_HPP
#define CONSTANTS_HPP
namespace
{
const double r_e = 2.8179403267e-13; //classical radius of electron in cm
const double m_e = 0.510998e6; //electron rest mass in eV/c^2
const double Pi = 3.1415926535897932384626433832795;
const double N_A = 6.0221413e+23; //Avogadro constant
const double eV_in_Joules = 1.60217657e-19; // 1 eV in Joules
}
#endif<file_sep>/README.md
sim-xrf
=======
This is a Python/C++ program for simulating the X-ray fluorescence (XRF) spectrum acquired at a synchrotron radiation beamline.
1. Installation
=======
The source files need to be compiled with a C++11-capable compiler (tested with g++ 4.7 and mingw-w64 x86_64-4.9.1-posix-seh-rt_v3-rev1).
xraylib (with its Python bindings) is also required at compile time.
xraylib is available at https://github.com/tschoonj/xraylib.
1.1. Compiling Xraylib
-------
Instructions on compiling xraylib can be found at https://github.com/tschoonj/xraylib/wiki.
After compiling xraylib, set "XRL_PATH" in "variables.mk" to the actual installation directory of xraylib.
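
For example, with the Windows default shipped in this repository's "variables.mk" (the value on your system will differ; the Linux line is only a placeholder):

    XRL_PATH = "D:\Program Files\xraylib 64-bit"
    # e.g. on Linux: XRL_PATH = /usr/local/xraylib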
1.2. Compiling sim-xrf
-------
In the top directory, run "make". If your C++ compiler is not invoked as "g++", set the variable "g++" in "variables.mk" to your actual compiler command.
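
A minimal build sketch (assuming GNU make and a C++11 compiler are on your PATH; "clang++" below is only an illustration of overriding the compiler variable):

    make                # builds Lib/libsim and the main executable
    # in variables.mk, to use another compiler:
    # g++ = clang++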
1.3. Initialization
-------
Before running the program, initialize the environment by sourcing ". init.sh".
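
"init.sh" is generated by the top-level makefile; with the defaults in this repository it roughly amounts to (paths shown as placeholders):

    export LD_LIBRARY_PATH="<repository>/Lib"
    export OMP_NUM_THREADS=16
    export PYTHONPATH="<repository>:$PYTHONPATH"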
2. Running sim-xrf
=======
sim-xrf can be run in C++ mode or in Python mode. Both modes require an input file that defines the parameters of the calculation.
The C++ mode writes its results to a text file; the Python mode can additionally plot the calculated spectrum, or read an existing output file and plot it.
2.1. Input file
-------
The input file is a plain text file defining the sample, illumination, solid-angle and detector parameters used in the calculation. See the example "input.txt" file for instructions.
2.2. Running in C++ mode
-------
On linux:
./main.out [input file name] [output file name]
On windows:
main.exe [input file name] [output file name]
2.3. Running in Python mode
-------
### 2.3.1. Direct run in command line
#### Calculation + plotting:
python simpy.py [input file name] [output file name]
#### Reading output file + plotting:
    python read.py [output file name]
### 2.3.2. Import as a module:
#### Calculate spectrum
from python.pyapi import calc
nout = 3000, 30, 500, 500 # N of channels, N of Z, N of lines, N of thetas
xlim = 0, 11
ylim = 1e-14, 1e-4
spec = calc(input_file="input.txt", output_file="output.txt")
spec.show(xlim=xlim, ylim=ylim)
#### Read/plot output files
    import matplotlib.pyplot as plt
    from read import read, plot
fname = "output.txt"
xlim = 0, 11
ylim = 1e-14, 1e-4
# Read data file
ev, a, labels = read(fname)
# Plot spectrum
plt.figure()
plt.title('Spectrum read from %s' % fname)
plot(ev, a, labels, xlim=xlim, ylim=ylim)<file_sep>/__init__.py
__author__ = 'Yue'
<file_sep>/Include/cs.hpp
#ifndef CS_HPP
#define CS_HPP
double sq(double x);
double ev2nm(double ev); //Convert photon energy (in ev) to wavelength (in nm).
double theta2x(double theta_rad, double ev); //theta in rad -> x in cm^-1
/*differential cross section for single electron*/
double thomson_pol(double theta_rad, double beta_rad);
double thomson_unpol(double theta_rad);
double dcs_rayleigh_pol(double theta_rad, double beta_rad, double ev, int Z);
double dcs_rayleigh_unpol(double theta_rad, double ev, int Z);
/* Compton scattering cross sections */
/* compton dcs of single electron */
//dcs
double ev_scattered(double theta_rad, double ev); //return the energy in ev of the Compton scattered photons.
double klein_nishina_unpol(double theta_rad, double ev); //Differential Klein-Nishina cross section in cm^2 for unpolarized radiation.
double klein_nishina_pol(double theta_rad, double beta_rad, double ev); //Differential Klein-Nishina cross section in cm^2 for polarized radiation.
//total cs
double klein_nishina_total_col(double ev); // Total Klein-Nishina collision cross section in cm^2.
double klein_nishina_total_sca(double ev); //Total Klein-Nishina scattering cross section in cm^2.
//compton dcs of atom
double dcs_compton_pol(double theta_rad, double beta_rad, double ev, int Z); //Differential Compton cross section in cm^2 for polarized radiation.
double dcs_compton_unpol(double theta_rad, double ev, int Z); //Differential Compton cross section in cm^2 for unpolarized radiation.
#endif<file_sep>/Include/spectrum.hpp
//spectrum.hpp
#ifndef SPECTRUM_HPP
#define SPECTRUM_HPP
#include <vector>
#include "sample.hpp"
#include "geometry.hpp"
#include "detector.hpp"
class Xrf
{
private:
std::vector<double> _y_mat;
solid_angle _omega;
std::vector<double> _ev_vec;
std::vector<double> _y_vec;
std::vector<int> _lines;
std::vector<int> _Z_vec;
std::vector<int> _row;
public:
Xrf(); //default constructor
Xrf(double ev0, const Compound & c, double n_photons=1);
Xrf(double ev0, const Compound & c, const solid_angle omega, double n_photons=1);
Xrf(const Sample & s, const Illumination & il, const solid_angle & omega_);
// ~Xrf();
Xrf & operator=(const Xrf & x);
const std::vector<double> & y_mat;
const solid_angle & omega;
const std::vector<double> & ev_vec;
const std::vector<double> & y_vec;
const std::vector<int> & lines;
const std::vector<int> & Z_vec;
const std::vector<int> & row;
// Xrf & operator+(const Xrf & x) const;
void add(const Xrf & x, bool mat_only = false);
void sum();
void show() const;
void out(std::ostream & ost) const;
};
class Rayleigh
{
private:
std::vector<double> _y_mat;
solid_angle _omega;
double _y;
double _ev0;
public:
Rayleigh(); //default constructor
Rayleigh(double ev0_, const Compound & c, const solid_angle & omega_, double n_photons=1, bool calc_sum = true);
Rayleigh(const Sample & s, const Illumination & il, const solid_angle & omega_);
// ~Rayleigh();
Rayleigh & operator=(const Rayleigh & r);
const std::vector<double> & y_mat;
const solid_angle & omega;
const double & y;
const double & ev0;
// Rayleigh & operator+(const Rayleigh & r) const;
void add(const Rayleigh & x, bool mat_only = false);
void sum();
void show() const;
void out(std::ostream & ost) const;
};
class Compton
{
private:
double _ev0;
std::vector<double> _y_mat;
solid_angle _omega;
std::vector<double> _ev_vec;
std::vector<double> _y_vec;
public:
Compton(); //default constructor
Compton(double ev0_, const Compound & c, const solid_angle & omega_, double n_photons=1, bool calc_ev = true, bool calc_sum = true);
Compton(const Sample & s, const Illumination & il, const solid_angle & omega_);
// ~Compton();
Compton & operator=(const Compton & c);
const std::vector<double> & y_mat;
const solid_angle & omega;
const std::vector<double> & ev_vec;
const std::vector<double> & y_vec;
// Compton & operator+(const Compton & c) const;
void add(const Compton & x, bool mat_only = false);
const double & ev0;
void sum();
void ev();
void sum_ev();
void show() const;
void out(std::ostream & ost) const;
};
class Spectrum
{
private:
std::vector<double> _y_vec;
std::vector<double> _y_sep;
// std::vector<int> _Z_vec;
// std::vector<int> _lines;
// std::vector<int> _row;
Xrf _xrf;
Rayleigh _ray;
Compton _comp;
solid_angle _omega;
Sample _sample;
Illumination _illumination;
Detector _detector;
public:
Spectrum();
Spectrum(const Sample & s, const Illumination & il, const solid_angle & omega, const Detector & det, bool det_response = true, bool det_window = true, bool separate = false);
Spectrum(double ev0, const Compound & c, const solid_angle & omega, const Detector & det, bool det_response = true, bool det_window = true, bool separate = false);
// ~Spectrum();
Spectrum & operator=(const Spectrum & s);
const std::vector<double> & y_vec;
const std::vector<double> & y_sep;
const std::vector<int> & Z_vec;
const std::vector<int> & lines;
const std::vector<int> & row;
const Xrf & xrf;
const Rayleigh & ray;
const Compton & comp;
const solid_angle & omega;
const Sample & sample;
const Illumination & illumination;
const Detector & detector;
void set_geom(const Sample & s, const Illumination & il, const solid_angle & omega);
void set_det(const Detector & det);
void calc(bool self_abs = true);
void genspec(bool det_response = true);
// void genspec_raw(std::vector<double> & ev_vec_, std::vector<double> & y_vec_);
void show() const;
void out(std::ostream & ost) const;
};
#endif<file_sep>/Include/detector.hpp
/*detector.hpp*/
#ifndef DETECTOR_HPP
#define DETECTOR_HPP
#include <vector>
#include <string>
#include <iostream>
class Channel
{
private:
double _ev_offset;
double _ev_gain;
int _n_channels;
public:
Channel(double ev_offset_ = 0, double ev_gain_ = 10, int n_channels_ = 2048);
// ~Channel();
Channel & operator=(const Channel & ch);
const double & ev_offset;
const double & ev_gain;
const int & n_channels;
int ev_to_channel(double ev) const;
void bin(const std::vector<double> & ev_raw, const std::vector<double> & y_raw, std::vector<double> & y_binned) const;
void bin(const double & ev_raw, const double & y_raw, std::vector<double> & y_binned) const;
void bin(const std::vector<double> & ev_raw, const std::vector<double> & y_raw, std::vector<double> & y_binned, std::vector<double> & y_separate, const std::vector<int> & row) const;
void bin(const double & ev_raw, const double & y_raw, std::vector<double> & y_binned, std::vector<double> & y_separate) const;
void show() const;
void out(std::ostream & ost) const;
};
class Response
{
private:
double _noise;
double _fano;
double _gamma;
double _fs;
double _ft;
double _ev_gain;
public:
Response(double noise_ = 100,
double fano_ = 0.114,
double gamma_ = 2.5,
double fs_ = 0.03,
double ft_ = 0.02,
double ev_gain_ = 0);
// ~Response();
Response & operator=(const Response & r);
const double & noise;
const double & fano;
const double & gamma;
const double & fs;
const double & ft;
const double & ev_gain;
void set_gain(const double ev_gain);
double FWHM(double ev);
void show() const;
void out(std::ostream & ost) const;
};
class Window
{
private:
std::string _material;
double _thickness;
double _density;
public:
Window(std::string material_ = "Be",
double thickness_ = 24e-4,
double density_ = 1.848);
// ~Window();
Window & operator=(const Window & w);
const std::string & material;
const double & thickness;
const double & density;
double transmission(double ev) const;
void show() const;
void out(std::ostream & ost) const;
};
class Detector
{
private:
Channel _channel;
Response _response;
Window _window;
public:
Detector(Channel channel_ = Channel(), Response response_ = Response(), Window window_ = Window());
// ~Detector();
Detector & operator=(const Detector & d);
const Channel & channel;
const Response & response;
const Window & window;
void genspec(const std::vector<double> & ev_raw, const std::vector<double> & y_raw, std::vector<double> & y_binned, bool det_response = true, bool det_window = true) const;
void genspec(const double & ev_raw, const double & y_raw, std::vector<double> & y_binned, bool det_response = true, bool det_window = true) const;
void genspec(const std::vector<double> & ev_raw, const std::vector<double> & y_raw, std::vector<double> & y_binned, std::vector<double> & y_separate, const std::vector<int> & row, bool det_response = true, bool det_window = true) const;
void genspec(const double & ev_raw, const double & y_raw, std::vector<double> & y_binned, std::vector<double> & y_separate, bool det_response = true, bool det_window = true) const;
void show() const;
void out(std::ostream & ost) const;
};
#endif<file_sep>/python/tools/__init__.py
__author__ = 'Yue'
__all__ = ["elementlookup"]<file_sep>/python/snr/snip.py
from .pysnip import *<file_sep>/src/spectrum.cpp
/*spectrum.cpp*/
#include "spectrum.hpp"
#include <iostream>
#include <numeric> //std::accumulate
#include <algorithm> //std::find
#include "xraylib.h"
#include "xrf.hpp"
#include "cs.hpp"
#define MP 1
Xrf::Xrf() : ev_vec(_ev_vec), y_vec(_y_vec), lines(_lines), Z_vec(_Z_vec), row(_row), y_mat(_y_mat), omega(_omega) {}
Xrf::Xrf(double ev0, const Compound & c, double n_photons)
: ev_vec(_ev_vec), y_vec(_y_vec), lines(_lines), Z_vec(_Z_vec), row(_row), y_mat(_y_mat), omega(_omega)
{
std::vector<int> lines_temp;
double weight;
int Z;
if (n_photons < 0)
n_photons = 1;
_row.push_back(0);
for (int i = 0; i < c.Z_vec.size(); i++)
{
Z = c.Z_vec[i];
weight = c.p_vec[i]*AtomicWeight(Z)/c.molecular_weight*n_photons;
lines_temp = mac_xrf(ev0, Z, _ev_vec, _y_vec, weight);
if (lines_temp.size() > 0)
{
_Z_vec.push_back(Z);
_lines.insert(_lines.end(),lines_temp.begin(),lines_temp.end());
_row.push_back(lines.size());
}
}
}
//c++98:
// Xrf::Xrf(double ev0, const Compound & c, const solid_angle omega, n_photons)
// : ev_vec(_ev_vec), y_vec(_y_vec), lines(_lines), Z_vec(_Z_vec), row(_row), y_mat(_y_mat), omega(_omega)
// {
// *this = Xrf(ev0, c, n_photons);
// for (int i = 0; i < y_vec.size(); i++)
// _y_vec[i] *= omega.subtend/(4*Pi);
// }
// c++11:
Xrf::Xrf(double ev0, const Compound & c, const solid_angle omega, double n_photons)
: Xrf(ev0, c, n_photons)
{
#pragma omp parallel for if(MP)
for (int i = 0; i < y_vec.size(); i++)
_y_vec[i] *= omega.subtend/(4*Pi);
}
Xrf::Xrf(const Sample & s, const Illumination & il, const solid_angle & omega_)
: ev_vec(_ev_vec), y_vec(_y_vec), lines(_lines), Z_vec(_Z_vec), row(_row), y_mat(_y_mat), omega(_omega)
{
_omega = omega_;
_row.push_back(0);
int n_pixels = omega.theta.size()*omega.beta.size();
for (std::vector<Monolayer>::const_iterator layer = s.layer_vec.begin(); layer < s.layer_vec.end(); layer++)
{
std::cout << "Calculating XRF for layer " << (*layer).layer << std::endl;
Xrf *temp_p = new Xrf(il.ev0, *layer, il.n_photons), & temp = *temp_p;
std::cout << "Calculating XRF attenuation for layer " << (*layer).layer << std::endl;
temp._y_mat.resize(temp.lines.size()*n_pixels,0);
#pragma omp parallel for collapse(3) if(MP)
for (int line=0; line < temp.lines.size(); line++)
{
for (int theta = 0; theta < omega.theta.size(); theta++)
{
for (int beta = 0; beta < omega.beta.size(); beta++)
{
double psiprime = il.psi_prime(omega.theta[theta], omega.beta[beta]);
int i = line*n_pixels+theta*omega.beta.size()+beta; // the matrix pixel identifier in y_mat
//attenuation within the layer
temp._y_mat[i] = temp.y_vec[line]*atten_mono(il.ev0, temp.ev_vec[line], il.psi, psiprime, *layer)*omega.domega(omega.theta[theta])/(4*Pi);
if (psiprime > 0) //reflection geometry
//attenuation from layers upstream
for (std::vector<Monolayer>::const_iterator layer_up = s.layer_vec.begin(); layer_up < layer; layer_up++)
temp._y_mat[i] *= atten_refl(il.ev0, temp.ev_vec[line], il.psi, psiprime, *layer_up);
else if (psiprime < 0) //transmission geometry
{
//attenuation from layers upstream
for (std::vector<Monolayer>::const_iterator layer_up = s.layer_vec.begin(); layer_up < layer; layer_up++)
temp._y_mat[i] *= atten_trans_in(il.ev0, il.psi, *layer_up);
//attenuation from layers downstream
for (std::vector<Monolayer>::const_iterator layer_down = layer+1; layer_down < s.layer_vec.end(); layer_down++)
temp._y_mat[i] *= atten_trans_out(temp.ev_vec[line], psiprime, *layer_down);
}
}
}
}
add(temp, true);
delete temp_p;
}
sum();
}
Xrf & Xrf::operator=(const Xrf & x)
{
if (this != & x)
{
_y_mat = x.y_mat;
_omega = x.omega;
_ev_vec = x.ev_vec;
_y_vec = x.y_vec;
_Z_vec = x.Z_vec;
_lines = x.lines;
_row = x.row;
}
return *this;
}
void Xrf::add(const Xrf & x, bool mat_only)
{
	int Z, n_pixels = 0; // n_pixels stays 0 when x carries no angular (y_mat) data
std::vector<int>::const_iterator Z_iter;
if (!x.y_mat.empty())
n_pixels = omega.theta.size()*omega.beta.size();
for (int i = 0; i < x.Z_vec.size(); i++)
{
Z = x.Z_vec[i];
Z_iter = std::find(Z_vec.begin(), Z_vec.end(), Z);
if (Z_iter != Z_vec.end())
{
int p = Z_iter - Z_vec.begin();
if (!mat_only)
{
std::vector<double>::const_iterator i2 = x.y_vec.begin()+x.row[i];
std::vector<double>::iterator i1 = _y_vec.begin()+row[p];
#pragma omp parallel for if(MP)
for (int j = 0; j < row[p+1]-row[p]; j++)
*(i1+j) += *(i2+j);
}
if (n_pixels >= 1)
{
std::vector<double>::const_iterator i2 = x.y_mat.begin()+x.row[i]*n_pixels;
std::vector<double>::iterator i1 = _y_mat.begin()+row[p]*n_pixels;
#pragma omp parallel for if(MP)
for (int j = 0; j < (row[p+1] - row[p])*n_pixels; j++)
*(i1+j) += *(i2+j);
}
}
else
{
#pragma omp parallel sections if(MP)
{
{
_Z_vec.push_back(Z);
_lines.insert(_lines.end(),x.lines.begin()+x.row[i],x.lines.begin()+x.row[i+1]);
_row.push_back(lines.size());
}
#pragma omp section
{
_ev_vec.insert(_ev_vec.end(),x.ev_vec.begin()+x.row[i],x.ev_vec.begin()+x.row[i+1]);
_y_vec.insert(_y_vec.end(),x.y_vec.begin()+x.row[i],x.y_vec.begin()+x.row[i+1]);
}
#pragma omp section
{
if (n_pixels)
_y_mat.insert(_y_mat.end(),x.y_mat.begin()+x.row[i]*n_pixels,x.y_mat.begin()+x.row[i+1]*n_pixels);
}
}
}
}
}
void Xrf::sum()
{
if (!y_mat.empty())
{
_y_vec.resize(ev_vec.size(), 0.0);
int n_pixels = omega.theta.size()*omega.beta.size();
#pragma omp parallel for if(MP)
for (int i = 0; i < ev_vec.size(); i++)
_y_vec[i] = std::accumulate(y_mat.begin()+i*n_pixels,y_mat.begin()+(i+1)*n_pixels,0.0);
}
}
void Xrf::show() const
{
out(std::cout);
}
void Xrf::out(std::ostream & ost) const
{
std::vector<int>::const_iterator Z, r, line;
std::vector<double>::const_iterator ev, y;
r = row.begin();
y = y_vec.begin();
ev = ev_vec.begin();
line = lines.begin();
ost << "# ======================================== #" << std::endl;
ost << "# # # # # XRF lines: # # # # #" << std::endl;
ost << "# ======================================== #" << std::endl;
for (Z = Z_vec.begin(); Z < Z_vec.end(); Z++)
{
ost << "# ======================================== #" << std::endl;
ost << "# Z = " << *Z << " (" << AtomicNumberToSymbol(*Z) << ")" << std::endl;
ost << "# Line#\tE0(eV)\tIntensity\n";
ost << "# ======================================== #" << std::endl;
for (int i=(*r); i<(*(r+1)); i++)
{
ost << *(line++) << "\t";
ost << *(ev++) << "\t";
ost << *(y++) << std::endl;
}
ost << "# ======================================== #" << std::endl;
r++;
}
ost << "# # # # # End of XRF # # # # #" << std::endl;
}
Rayleigh::Rayleigh()
: y_mat(_y_mat), omega(_omega), y(_y), ev0(_ev0)
{}
Rayleigh::Rayleigh(double ev0_, const Compound & c, const solid_angle & omega_, double n_photons, bool calc_sum)
: y_mat(_y_mat), omega(_omega), y(_y), ev0(_ev0)
{
_ev0 = ev0_;
_omega = omega_;
_y_mat.resize(omega.theta.size()*omega.beta.size(),0.0);
if (n_photons < 0)
n_photons = 1;
#pragma omp parallel for collapse(2) if(MP)
for (int j = 0; j < omega.theta.size(); j++)
for (int k = 0; k < omega.beta.size(); k++)
_y_mat[j*omega.beta.size()+k] = c.dmac_rayleigh_pol(ev0, omega.theta[j], omega.beta[k])*omega.domega(omega.theta[j])*n_photons;
if (calc_sum)
sum();
}
Rayleigh::Rayleigh(const Sample & s, const Illumination & il, const solid_angle & omega_)
: y_mat(_y_mat), omega(_omega), y(_y), ev0(_ev0)
{
_ev0 = il.ev0;
_omega = omega_;
for (std::vector<Monolayer>::const_iterator layer = s.layer_vec.begin(); layer < s.layer_vec.end(); layer++)
{
std::cout << "Calculating Rayleigh scattering for layer " << (*layer).layer << std::endl;
Rayleigh *temp_p = new Rayleigh(il.ev0, *layer, omega, il.n_photons, false), & temp = *temp_p;
std::cout << "Calculating Rayleigh scattering attenuation for layer " << (*layer).layer << std::endl;
#pragma omp parallel for collapse(2) if(MP)
for (int theta = 0; theta < omega.theta.size(); theta++)
{
for (int beta = 0; beta < omega.beta.size(); beta++)
{
double psiprime = il.psi_prime(omega.theta[theta], omega.beta[beta]);
int i = theta*omega.beta.size() + beta;
//attenuation within the layer
temp._y_mat[i] *= atten_mono(il.ev0, il.ev0, il.psi, psiprime, *layer);
if (psiprime > 0) //reflection geometry
//attenuation from layers upstream
for (std::vector<Monolayer>::const_iterator layer_up = s.layer_vec.begin(); layer_up < layer; layer_up++)
temp._y_mat[i] *= atten_refl(il.ev0, il.ev0, il.psi, psiprime, *layer_up);
else if (psiprime < 0) //transmission geometry
{
//attenuation from layers upstream
for (std::vector<Monolayer>::const_iterator layer_up = s.layer_vec.begin(); layer_up < layer; layer_up++)
temp._y_mat[i] *= atten_trans_in(il.ev0, il.psi, *layer_up);
//attenuation from layers downstream
for (std::vector<Monolayer>::const_iterator layer_down = layer+1; layer_down < s.layer_vec.end(); layer_down++)
temp._y_mat[i] *= atten_trans_out(il.ev0, psiprime, *layer_down);
}
}
}
add(temp, true);
delete temp_p;
}
sum();
}
Rayleigh & Rayleigh::operator=(const Rayleigh & r)
{
if (this != & r)
{
_y_mat = r.y_mat;
_omega = r.omega;
_y = r.y;
_ev0 = r.ev0;
}
return *this;
}
void Rayleigh::add(const Rayleigh & x, bool mat_only)
{
if (!x.y_mat.empty())
{
if (y_mat.empty())
_y_mat = x.y_mat;
else
#pragma omp parallel for if(MP)
for (int i = 0; i < y_mat.size(); i++)
_y_mat[i] += x.y_mat[i];
}
if (!mat_only)
_y += x.y;
}
void Rayleigh::sum()
{
if (!y_mat.empty())
_y = std::accumulate(y_mat.begin(), y_mat.end(), 0.0);
}
void Rayleigh::show() const
{
out(std::cout);
}
void Rayleigh::out(std::ostream & ost) const
{
ost << "# ======================================== #" << std::endl;
ost << "# # # # # Rayleigh scattering # # # # #" << std::endl;
ost << "# Peak @ " << ev0 << std::endl;
ost << "# Intensity: " << y << std::endl;
ost << "# # # # # End of Rayleigh scattering # # # # #" << std::endl;
ost << "# ======================================== #" << std::endl;
}
Compton::Compton()
: y_mat(_y_mat), omega(_omega), y_vec(_y_vec), ev_vec(_ev_vec), ev0(_ev0)
{}
Compton::Compton(double ev0_, const Compound & c, const solid_angle & omega_, double n_photons, bool calc_ev, bool calc_sum)
: y_mat(_y_mat), omega(_omega), y_vec(_y_vec), ev_vec(_ev_vec), ev0(_ev0)
{
_ev0 = ev0_;
_omega = omega_;
_y_mat.resize(omega.theta.size()*omega.beta.size(),0.0);
double theta, beta;
if (n_photons < 0)
n_photons = 1;
// #pragma omp parallel for collapse(2) private(theta, beta) if(MP)
// for (int j = 0; j < omega.theta.size(); j++)
// for (int k = 0; k < omega.beta.size(); k++)
// {
// theta = omega.theta[j];
// if (theta < 1e-6)
// continue;
// beta = omega.beta[k];
// _y_mat[j*omega.beta.size()+k] = c.dmac_compton_pol(ev0, theta, beta)*omega.domega(theta);
// }
for (int j = 0; j < omega.theta.size(); j++)
{
theta = omega.theta[j];
if (theta < 1e-6)
continue;
#pragma omp parallel for if(MP)
for (int k = 0; k < omega.beta.size(); k++)
{
beta = omega.beta[k];
_y_mat[j*omega.beta.size()+k] = c.dmac_compton_pol(ev0, theta, beta)*omega.domega(theta)*n_photons;
}
}
if (calc_ev)
{
_ev_vec.resize(omega.theta.size(),0.0);
#pragma omp parallel for if(MP)
for (int i = 0; i < omega.theta.size(); i++)
_ev_vec[i] = ev_scattered(omega.theta[i], ev0);
}
if (calc_sum)
{
_y_vec.resize(omega.theta.size(),0.0);
#pragma omp parallel for if(MP)
for (int i = 0; i < omega.theta.size(); i++)
_y_vec[i] = std::accumulate(y_mat.begin()+i*omega.beta.size(),y_mat.begin()+(i+1)*omega.beta.size(),0.0);
}
}
Compton::Compton(const Sample & s, const Illumination & il, const solid_angle & omega_)
: y_mat(_y_mat), omega(_omega), y_vec(_y_vec), ev_vec(_ev_vec), ev0(_ev0)
{
_ev0 = il.ev0;
_omega = omega_;
ev();
for (std::vector<Monolayer>::const_iterator layer = s.layer_vec.begin(); layer < s.layer_vec.end(); layer++)
{
std::cout << "Calculating Compton scattering for layer " << (*layer).layer << std::endl;
Compton *temp_p = new Compton(il.ev0, *layer, omega, il.n_photons, false, false), & temp = *temp_p;
std::cout << "Calculating Compton scattering attenuation for layer " << (*layer).layer << std::endl;
#pragma omp parallel for collapse(2) if(MP)
// for (std::vector<double>::const_iterator theta = omega.theta.begin(), ev = ev_vec.begin(); theta < omega.theta.end(); theta++, ev++)
for (int theta = 0; theta < omega.theta.size(); theta++)
for (int beta = 0; beta < omega.beta.size(); beta++)
{
double psiprime = il.psi_prime(omega.theta[theta], omega.beta[beta]);
int i = theta*omega.beta.size() + beta;
//attenuation within the layer
temp._y_mat[i] *= atten_mono(il.ev0, ev_vec[theta], il.psi, psiprime, *layer);
if (psiprime > 0) //reflection geometry
//attenuation from layers upstream
for (std::vector<Monolayer>::const_iterator layer_up = s.layer_vec.begin(); layer_up < layer; layer_up++)
temp._y_mat[i] *= atten_refl(il.ev0, ev_vec[theta], il.psi, psiprime, *layer_up);
else if (psiprime < 0) //transmission geometry
{
//attenuation from layers upstream
for (std::vector<Monolayer>::const_iterator layer_up = s.layer_vec.begin(); layer_up < layer; layer_up++)
temp._y_mat[i] *= atten_trans_in(il.ev0, il.psi, *layer_up);
//attenuation from layers downstream
for (std::vector<Monolayer>::const_iterator layer_down = layer+1; layer_down < s.layer_vec.end(); layer_down++)
temp._y_mat[i] *= atten_trans_out(ev_vec[theta], psiprime, *layer_down);
}
}
add(temp, true);
delete temp_p;
}
sum();
}
Compton & Compton::operator=(const Compton & c)
{
if (this != & c)
{
_y_mat = c.y_mat;
_omega = c.omega;
_y_vec = c.y_vec;
_ev_vec = c.ev_vec;
		_ev0 = c.ev0;
}
return *this;
}
void Compton::add(const Compton & x, bool mat_only)
{
if (!x.y_mat.empty())
{
if (y_mat.empty())
_y_mat = x.y_mat;
else
#pragma omp parallel for if(MP)
for (int i = 0; i < y_mat.size(); i++)
_y_mat[i] += x.y_mat[i];
}
if (!mat_only)
#pragma omp parallel for if(MP)
for (int j = 0; j < x.y_vec.size(); j++)
_y_vec[j] += x.y_vec[j];
}
void Compton::sum()
{
if (!y_mat.empty())
{
_y_vec.resize(omega.theta.size());
#pragma omp parallel for if(MP)
for (int j = 0; j < omega.theta.size(); j++)
_y_vec[j] = std::accumulate(y_mat.begin()+j*omega.beta.size(),y_mat.begin()+(j+1)*omega.beta.size(),0.0);
}
}
void Compton::sum_ev()
{
if (!y_mat.empty())
{
_y_vec.resize(omega.theta.size());
_ev_vec.resize(omega.theta.size());
double theta;
#pragma omp parallel for private(theta) if(MP)
for (int j = 0; j < omega.theta.size(); j++)
{
theta = omega.theta[j];
_y_vec[j] = std::accumulate(y_mat.begin()+j*omega.beta.size(),y_mat.begin()+(j+1)*omega.beta.size(),0.0);
_ev_vec[j] = ev_scattered(theta, ev0);
}
}
}
void Compton::ev()
{
_ev_vec.resize(omega.theta.size());
double theta;
#pragma omp parallel for private(theta) if(MP)
for (int j = 0; j < omega.theta.size(); j++)
{
theta = omega.theta[j];
_ev_vec[j] = ev_scattered(theta, ev0);
}
}
void Compton::show() const
{
out(std::cout);
}
void Compton::out(std::ostream & ost) const
{
ost << "# ======================================== #" << std::endl;
ost << "# # # # # Compton scattering # # # # #" << std::endl;
ost << "# Energy range: " << ev_vec.back() << " - " << ev_vec.front() << std::endl;
ost << "# Total intensity: " << std::accumulate(y_vec.begin(), y_vec.end(), 0.0) << std::endl;
ost << "# # # # # End of Compton scattering # # # # #" << std::endl;
ost << "# ======================================== #" << std::endl;
}
Spectrum::Spectrum()
: y_vec(_y_vec), xrf(_xrf), ray(_ray), comp(_comp), omega(_omega), sample(_sample), illumination(_illumination), detector(_detector), Z_vec(xrf.Z_vec), lines(xrf.lines), row(xrf.row), y_sep(_y_sep) {}
Spectrum::Spectrum(const Sample & s, const Illumination & il, const solid_angle & omega_, const Detector & det, bool det_response, bool det_window, bool separate)
: y_vec(_y_vec), xrf(_xrf), ray(_ray), comp(_comp), omega(_omega), sample(_sample), illumination(_illumination), detector(_detector), Z_vec(xrf.Z_vec), lines(xrf.lines), row(xrf.row), y_sep(_y_sep)
{
_omega = omega_;
_illumination = il;
_sample = s;
_detector = det;
_xrf = Xrf(sample, illumination, omega);
#pragma omp parallel sections if(MP)
{
{_ray = Rayleigh(sample, illumination, omega);}
#pragma omp section
{_comp = Compton(sample, illumination, omega);}
}
_y_vec.clear();
_y_vec.resize(detector.channel.n_channels);
if (!separate)
{
detector.genspec(xrf.ev_vec, xrf.y_vec, _y_vec);
detector.genspec(illumination.ev0, ray.y, _y_vec);
detector.genspec(comp.ev_vec, comp.y_vec, _y_vec);
}
else
{
std::vector<int> row_temp{0, -1};
detector.genspec(xrf.ev_vec, xrf.y_vec, _y_vec, _y_sep, xrf.row);
detector.genspec(illumination.ev0, ray.y, _y_vec, _y_sep);
detector.genspec(comp.ev_vec, comp.y_vec, _y_vec, _y_sep, row_temp);
}
}
Spectrum::Spectrum(double ev0, const Compound & c, const solid_angle & omega_, const Detector & det, bool det_response, bool det_window, bool separate)
: y_vec(_y_vec), xrf(_xrf), ray(_ray), comp(_comp), omega(_omega), sample(_sample), illumination(_illumination), detector(_detector), Z_vec(xrf.Z_vec), lines(xrf.lines), row(xrf.row), y_sep(_y_sep)
{
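    // NOTE: ev0 and c are not referenced in this overload; xrf, ray and comp are built from the
    // default-constructed sample and illumination members.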
_omega = omega_;
_detector = det;
#pragma omp parallel sections if(MP)
{
{_xrf = Xrf(sample, illumination, omega);}
#pragma omp section
{_ray = Rayleigh(sample, illumination, omega);
_comp = Compton(sample, illumination, omega);}
}
_y_vec.clear();
_y_vec.resize(detector.channel.n_channels);
std::cout << "Generating detector binned spectra..." << std::endl;
if (!separate)
{
detector.genspec(xrf.ev_vec, xrf.y_vec, _y_vec);
detector.genspec(illumination.ev0, ray.y, _y_vec);
detector.genspec(comp.ev_vec, comp.y_vec, _y_vec);
}
else
{
std::vector<int> row_temp{0, -1};
detector.genspec(xrf.ev_vec, xrf.y_vec, _y_vec, _y_sep, xrf.row);
detector.genspec(illumination.ev0, ray.y, _y_vec, _y_sep);
detector.genspec(comp.ev_vec, comp.y_vec, _y_vec, _y_sep, row_temp);
}
}
void Spectrum::show() const
{
out(std::cout);
}
void Spectrum::out(std::ostream & ost) const
{
    if (&ost != &std::cout) // only dump the full per-channel arrays when writing somewhere other than stdout
{
ost << "# Total spectrum for each channel " << std::endl;
for (auto i : y_vec)
ost << i << "\t";
ost << std::endl;
ost << std::endl;
if (!y_sep.empty())
{
ost << "# Separate spectra for each channel " << std::endl;
int j = 0;
for (auto i : y_sep)
{
ost << i << "\t";
if (++j == y_vec.size())
{
ost << std::endl;
ost << std::endl;
j = 0;
}
}
ost << std::endl;
}
}
ost << "# ======================================== #" << std::endl;
ost << "# # # # # Spectrum: # # # # #" << std::endl;
ost << "# ======================================== #" << std::endl;
xrf.out(ost);
ray.out(ost);
comp.out(ost);
ost << "# ======================================== #" << std::endl;
ost << "# # # # # End of spectrum # # # # #" << std::endl;
ost << "# ======================================== #" << std::endl;
}
| 8d7badb0e1570af621026096eb6f328ac9ca5eff | [
"Markdown",
"Python",
"Makefile",
"C++"
] | 42 | Python | gbzan/sim-xrf | e0009b969551e5a03c3b14e4d02153be234b7ca5 | 7e817de8092166866930e0ade71f3f3136392e8d | |
refs/heads/master | <repo_name>GameTechDev/D3D12VariableRateShading<file_sep>/src/SimpleCamera.h
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
//*********************************************************
//
// Copyright 2019 Intel Corporation
//
// Permission is hereby granted, free of charge, to any
// person obtaining a copy of this software and associated
// documentation files(the "Software"), to deal in the Software
// without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to
// whom the Software is furnished to do so, subject to the
// following conditions :
// The above copyright notice and this permission notice shall
// be included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT.IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
//
//*********************************************************
#pragma once
using namespace DirectX;
class SimpleCamera
{
public:
SimpleCamera();
void Init(XMFLOAT3 position);
void Update(float elapsedSeconds);
XMMATRIX GetViewMatrix();
XMMATRIX GetProjectionMatrix(float fov, float aspectRatio, float nearPlane = 1.0f, float farPlane = 1000.0f);
void SetMoveSpeed(float unitsPerSecond);
void SetTurnSpeed(float radiansPerSecond);
void OnKeyDown(WPARAM key);
void OnKeyUp(WPARAM key);
void OnMouseDown(WPARAM btnState, int x, int y);
void OnMouseUp(WPARAM btnState, int x, int y);
void OnMouseMove(WPARAM btnState, int x, int y);
private:
void Reset();
struct KeysPressed
{
bool w;
bool a;
bool s;
bool d;
bool r;
bool left;
bool right;
bool up;
bool down;
};
XMFLOAT3 m_initialPosition;
XMFLOAT3 m_position;
float m_yaw; // Relative to the +z axis.
float m_pitch; // Relative to the xz plane.
float m_dx;
float m_dy;
XMFLOAT3 m_lookDirection;
XMFLOAT3 m_upDirection;
float m_moveSpeed; // Speed at which the camera moves, in units per second.
float m_turnSpeed; // Speed at which the camera turns, in radians per second.
XMFLOAT2 m_lastMousePos;
KeysPressed m_keysPressed;
};
<file_sep>/src/FrameResource.cpp
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
//*********************************************************
//
// Copyright 2019 Intel Corporation
//
// Permission is hereby granted, free of charge, to any
// person obtaining a copy of this software and associated
// documentation files(the "Software"), to deal in the Software
// without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to
// whom the Software is furnished to do so, subject to the
// following conditions :
// The above copyright notice and this permission notice shall
// be included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT.IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
//
//*********************************************************
#include "stdafx.h"
#include "FrameResource.h"
FrameResource::FrameResource(ID3D12Device* pDevice, UINT cityRowCount, UINT cityColumnCount, UINT cityMaterialCount, float citySpacingInterval) :
m_fenceValue(0),
m_cityRowCount(cityRowCount),
m_cityColumnCount(cityColumnCount),
m_cityMaterialCount(cityMaterialCount)
{
m_modelMatrices.resize(m_cityRowCount * m_cityColumnCount);
m_modelShadingRates.resize(m_cityRowCount * m_cityColumnCount);
// The command allocator is used by the main sample class when
// resetting the command list in the main update loop. Each frame
// resource needs a command allocator because command allocators
// cannot be reused until the GPU is done executing the commands
// associated with it.
ThrowIfFailed(pDevice->CreateCommandAllocator(D3D12_COMMAND_LIST_TYPE_DIRECT, IID_PPV_ARGS(&m_commandAllocator)));
ThrowIfFailed(pDevice->CreateCommandAllocator(D3D12_COMMAND_LIST_TYPE_BUNDLE, IID_PPV_ARGS(&m_bundleAllocator)));
// Create an upload heap for the constant buffers.
ThrowIfFailed(pDevice->CreateCommittedResource(
&CD3DX12_HEAP_PROPERTIES(D3D12_HEAP_TYPE_UPLOAD),
D3D12_HEAP_FLAG_NONE,
&CD3DX12_RESOURCE_DESC::Buffer(sizeof(SceneConstantBuffer) * m_cityRowCount * m_cityColumnCount),
D3D12_RESOURCE_STATE_GENERIC_READ,
nullptr,
IID_PPV_ARGS(&m_cbvUploadHeap)));
// Map the constant buffers. Note that unlike D3D11, the resource
// does not need to be unmapped for use by the GPU. In this sample,
    // the resource stays 'permanently' mapped to avoid overhead with
// mapping/unmapping each frame.
CD3DX12_RANGE readRange(0, 0); // We do not intend to read from this resource on the CPU.
ThrowIfFailed(m_cbvUploadHeap->Map(0, &readRange, reinterpret_cast<void**>(&m_pConstantBuffers)));
// Update all of the model matrices once; our cities don't move so
// we don't need to do this ever again.
SetCityPositions(citySpacingInterval, -citySpacingInterval);
SetCityShadingRates();
}
FrameResource::~FrameResource()
{
m_cbvUploadHeap->Unmap(0, nullptr);
m_pConstantBuffers = nullptr;
}
void FrameResource::InitBundle(ID3D12Device* pDevice, ID3D12PipelineState* pPso,
UINT frameResourceIndex, UINT numIndices, D3D12_INDEX_BUFFER_VIEW* pIndexBufferViewDesc, D3D12_VERTEX_BUFFER_VIEW* pVertexBufferViewDesc,
ID3D12DescriptorHeap* pCbvSrvDescriptorHeap, UINT cbvSrvDescriptorSize, ID3D12DescriptorHeap* pSamplerDescriptorHeap, ID3D12RootSignature* pRootSignature)
{
ThrowIfFailed(pDevice->CreateCommandList(0, D3D12_COMMAND_LIST_TYPE_BUNDLE, m_bundleAllocator.Get(), pPso, IID_PPV_ARGS(&m_bundle)));
NAME_D3D12_OBJECT(m_bundle);
PopulateCommandList(m_bundle.Get(), pPso, frameResourceIndex, numIndices, pIndexBufferViewDesc,
pVertexBufferViewDesc, pCbvSrvDescriptorHeap, cbvSrvDescriptorSize, pSamplerDescriptorHeap, pRootSignature);
ThrowIfFailed(m_bundle->Close());
}
void FrameResource::SetCityPositions(FLOAT intervalX, FLOAT intervalZ)
{
for (UINT i = 0; i < m_cityRowCount; i++)
{
FLOAT cityOffsetZ = i * intervalZ;
for (UINT j = 0; j < m_cityColumnCount; j++)
{
FLOAT cityOffsetX = j * intervalX;
// The y position is based off of the city's row and column
// position to prevent z-fighting.
XMStoreFloat4x4(&m_modelMatrices[i * m_cityColumnCount + j], XMMatrixTranslation(cityOffsetX, 0.02f * (i * m_cityColumnCount + j), cityOffsetZ));
}
}
}
void FrameResource::SetCityShadingRates()
{
for (UINT i = 0; i < m_cityRowCount; i++)
{
for (UINT j = 0; j < m_cityColumnCount; j++)
{
m_modelShadingRates[i * m_cityColumnCount + j] = 0;
}
}
}
void FrameResource::PopulateCommandList(ID3D12GraphicsCommandList* pCommandList, ID3D12PipelineState* pPso,
UINT frameResourceIndex, UINT numIndices, D3D12_INDEX_BUFFER_VIEW* pIndexBufferViewDesc, D3D12_VERTEX_BUFFER_VIEW* pVertexBufferViewDesc,
ID3D12DescriptorHeap* pCbvSrvDescriptorHeap, UINT cbvSrvDescriptorSize, ID3D12DescriptorHeap* pSamplerDescriptorHeap, ID3D12RootSignature* pRootSignature)
{
// If the root signature matches the root signature of the caller, then
// bindings are inherited, otherwise the bind space is reset.
pCommandList->SetGraphicsRootSignature(pRootSignature);
ID3D12DescriptorHeap* ppHeaps[] = { pCbvSrvDescriptorHeap, pSamplerDescriptorHeap };
pCommandList->SetDescriptorHeaps(_countof(ppHeaps), ppHeaps);
pCommandList->IASetPrimitiveTopology(D3D_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
pCommandList->IASetIndexBuffer(pIndexBufferViewDesc);
pCommandList->IASetVertexBuffers(0, 1, pVertexBufferViewDesc);
pCommandList->SetGraphicsRootDescriptorTable(0, pCbvSrvDescriptorHeap->GetGPUDescriptorHandleForHeapStart());
pCommandList->SetGraphicsRootDescriptorTable(1, pSamplerDescriptorHeap->GetGPUDescriptorHandleForHeapStart());
// Calculate the descriptor offset due to multiple frame resources.
// (m_cityMaterialCount + 1) SRVs + how many CBVs we have currently.
UINT frameResourceDescriptorOffset = (m_cityMaterialCount + 1) + (frameResourceIndex * m_cityRowCount * m_cityColumnCount);
CD3DX12_GPU_DESCRIPTOR_HANDLE cbvSrvHandle(pCbvSrvDescriptorHeap->GetGPUDescriptorHandleForHeapStart(), frameResourceDescriptorOffset, cbvSrvDescriptorSize);
INT shadingRate;
PIXBeginEvent(pCommandList, 0, L"Draw cities");
for (UINT i = 0; i < m_cityRowCount; i++)
{
for (UINT j = 0; j < m_cityColumnCount; j++)
{
shadingRate = m_modelShadingRates[i * m_cityColumnCount + j];
#ifdef VRS_ENABLED
D3D12_SHADING_RATE eVRSShadingRate = D3D12_SHADING_RATE_1X1;
if (shadingRate == 0)
{
eVRSShadingRate = D3D12_SHADING_RATE_1X1;
}
else if (shadingRate == 1)
{
eVRSShadingRate = D3D12_SHADING_RATE_1X2;
}
else if (shadingRate == 4)
{
eVRSShadingRate = D3D12_SHADING_RATE_2X1;
}
else if (shadingRate == 5)
{
eVRSShadingRate = D3D12_SHADING_RATE_2X2;
}
else if (shadingRate == 10)
{
eVRSShadingRate = D3D12_SHADING_RATE_4X4;
}
reinterpret_cast<ID3D12GraphicsCommandList5*>(pCommandList)->RSSetShadingRate(eVRSShadingRate, nullptr);
#endif
pCommandList->SetPipelineState(pPso);
// Set the city's root constant for dynamically indexing into the material array.
pCommandList->SetGraphicsRoot32BitConstant(3, (i * m_cityColumnCount) + j, 0);
// Set this city's CBV table and move to the next descriptor.
pCommandList->SetGraphicsRootDescriptorTable(2, cbvSrvHandle);
cbvSrvHandle.Offset(cbvSrvDescriptorSize);
pCommandList->DrawIndexedInstanced(numIndices, 1, 0, 0, 0);
}
}
PIXEndEvent(pCommandList);
}
void XM_CALLCONV FrameResource::UpdateConstantBuffers(FXMMATRIX view, CXMMATRIX projection, float offsetX, bool vrs, int shadingRate, bool viz)
{
XMMATRIX model;
XMFLOAT4X4 mvp;
XMMATRIX cameraMat;
XMFLOAT4X4 camera;
XMFLOAT3 eyePos;
XMFLOAT3 originalPos;
cameraMat = XMMatrixInverse(nullptr, view);
XMStoreFloat4x4(&camera, cameraMat);
eyePos.x = camera._41;
eyePos.y = camera._42;
eyePos.z = camera._43;
m_pConstantBuffers[0].offset.x = offsetX;
for (UINT i = 0; i < m_cityRowCount; i++)
{
for (UINT j = 0; j < m_cityColumnCount; j++)
{
model = XMLoadFloat4x4(&m_modelMatrices[i * m_cityColumnCount + j]);
// Compute the model-view-projection matrix.
XMStoreFloat4x4(&mvp, XMMatrixTranspose(model * view * projection));
m_pConstantBuffers[i * m_cityColumnCount + j].mvp = mvp;
m_pConstantBuffers[i * m_cityColumnCount + j].offset = m_pConstantBuffers[0].offset;
float fModel41 = m_modelMatrices[i * m_cityColumnCount + j]._41;
float fModel42 = m_modelMatrices[i * m_cityColumnCount + j]._42;
float fModel43 = m_modelMatrices[i * m_cityColumnCount + j]._43;
originalPos.x = fModel41 + m_pConstantBuffers[0].offset.x;
originalPos.y = fModel42;
originalPos.z = fModel43;
//distance based solution
//if the distance from the Eye in World space and the object in world space is > N
#ifdef VRS_ENABLED
if (vrs) {
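                // shadingRate == 42 is used as a sentinel meaning "pick the rate from the camera distance"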
if (shadingRate != 42)
m_modelShadingRates[i * m_cityColumnCount + j] = shadingRate;
else {
float distance = sqrt(((eyePos.x - originalPos.x)*(eyePos.x - originalPos.x)) + ((eyePos.y - originalPos.y)*(eyePos.y - originalPos.y)) + ((eyePos.z - originalPos.z)*(eyePos.z - originalPos.z)));
if (distance < g_fNearDistance)
{
m_modelShadingRates[i * m_cityColumnCount + j] = D3D12_SHADING_RATE_1X1;
}
else if (distance >= g_fNearDistance && distance < g_fFarDistance)
{
m_modelShadingRates[i * m_cityColumnCount + j] = D3D12_SHADING_RATE_2X2;
}
else
{
m_modelShadingRates[i * m_cityColumnCount + j] = D3D12_SHADING_RATE_4X4;
}
}
}
else {
m_modelShadingRates[i * m_cityColumnCount + j] = D3D12_SHADING_RATE_1X1;
}
if (m_pConstantBuffers[i * m_cityColumnCount + j].shadingRate != m_modelShadingRates[i * m_cityColumnCount + j])
m_pConstantBuffers[i * m_cityColumnCount + j].shadingRate = m_modelShadingRates[i * m_cityColumnCount + j];
if (m_pConstantBuffers[i * m_cityColumnCount + j].enableViz != (FLOAT)viz)
m_pConstantBuffers[i * m_cityColumnCount + j].enableViz = viz;
#endif
}
}
}
<file_sep>/src/imgui.ini
[Window][Debug##Default]
Pos=60,60
Size=400,400
Collapsed=0
[Window][Hello, DX12 VRS Tier 1!]
Pos=0,0
Size=300,720
Collapsed=0
[Window][]
Pos=658,220
Size=400,360
Collapsed=0
[Window][Test]
Pos=0,0
Size=300,720
Collapsed=0
[Window][MDAPI Metrics]
Pos=880,50
Size=400,620
Collapsed=0
[Window][Hello, world!]
Pos=60,60
Size=344,115
Collapsed=0
[Window][D3D12VariableRateShading]
Pos=55,33
Size=503,310
Collapsed=0
[Window][Intel D3D12VariableRateShading]
Pos=20,2
Size=419,302
Collapsed=0
<file_sep>/src/DXSample.cpp
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
//*********************************************************
//
// Copyright 2019 Intel Corporation
//
// Permission is hereby granted, free of charge, to any
// person obtaining a copy of this software and associated
// documentation files(the "Software"), to deal in the Software
// without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to
// whom the Software is furnished to do so, subject to the
// following conditions :
// The above copyright notice and this permission notice shall
// be included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT.IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
//
//*********************************************************
#include "stdafx.h"
#include "DXSample.h"
using namespace Microsoft::WRL;
DXSample::DXSample(UINT width, UINT height, std::wstring name) :
m_width(width),
m_height(height),
m_title(name),
m_useWarpDevice(false)
{
WCHAR assetsPath[512];
GetAssetsPath(assetsPath, _countof(assetsPath));
m_assetsPath = assetsPath;
m_aspectRatio = static_cast<float>(width) / static_cast<float>(height);
}
DXSample::~DXSample()
{
}
// Helper function for resolving the full path of assets.
std::wstring DXSample::GetAssetFullPath(LPCWSTR assetName)
{
return m_assetsPath + assetName;
}
// Helper function for acquiring the first available hardware adapter that supports Direct3D 12.
// If no such adapter can be found, *ppAdapter will be set to nullptr.
_Use_decl_annotations_
void DXSample::GetHardwareAdapter(IDXGIFactory2* pFactory, IDXGIAdapter1** ppAdapter)
{
ComPtr<IDXGIAdapter1> adapter;
*ppAdapter = nullptr;
for (UINT adapterIndex = 0; DXGI_ERROR_NOT_FOUND != pFactory->EnumAdapters1(adapterIndex, &adapter); ++adapterIndex)
{
DXGI_ADAPTER_DESC1 desc;
adapter->GetDesc1(&desc);
if (desc.Flags & DXGI_ADAPTER_FLAG_SOFTWARE)
{
// Don't select the Basic Render Driver adapter.
// If you want a software adapter, pass in "/warp" on the command line.
continue;
}
// Check to see if the adapter supports Direct3D 12, but don't create the
// actual device yet.
if (SUCCEEDED(D3D12CreateDevice(adapter.Get(), D3D_FEATURE_LEVEL_11_0, _uuidof(ID3D12Device), nullptr)))
{
break;
}
}
*ppAdapter = adapter.Detach();
}
// Helper function for setting the window's title text.
void DXSample::SetCustomWindowText(LPCWSTR text)
{
std::wstring windowText = m_title + L": " + text;
SetWindowText(Win32Application::GetHwnd(), windowText.c_str());
}
// Helper function for parsing any supplied command line args.
_Use_decl_annotations_
void DXSample::ParseCommandLineArgs(WCHAR* argv[], int argc)
{
for (int i = 1; i < argc; ++i)
{
if (_wcsnicmp(argv[i], L"-warp", wcslen(argv[i])) == 0 ||
_wcsnicmp(argv[i], L"/warp", wcslen(argv[i])) == 0)
{
m_useWarpDevice = true;
m_title = m_title + L" (WARP)";
}
}
}
<file_sep>/src/D3D12VariableRateShading.h
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
//*********************************************************
//
// Copyright 2019 Intel Corporation
//
// Permission is hereby granted, free of charge, to any
// person obtaining a copy of this software and associated
// documentation files(the "Software"), to deal in the Software
// without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to
// whom the Software is furnished to do so, subject to the
// following conditions :
// The above copyright notice and this permission notice shall
// be included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT.IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
//
//*********************************************************
#pragma once
#include "DXSample.h"
#include "StepTimer.h"
#include "FrameResource.h"
#include "SimpleCamera.h"
#include <windows.h>
using namespace DirectX;
// Note that while ComPtr is used to manage the lifetime of resources on the CPU,
// it has no understanding of the lifetime of resources on the GPU. Apps must account
// for the GPU lifetime of resources to avoid destroying objects that may still be
// referenced by the GPU.
// An example of this can be found in the class method: OnDestroy().
using Microsoft::WRL::ComPtr;
class D3D12VariableRateShading : public DXSample
{
public:
D3D12VariableRateShading(UINT width, UINT height, std::wstring name, bool bDisableVRS);
virtual void OnInit();
virtual void OnUpdate();
virtual void OnRender();
virtual void OnDestroy();
virtual void OnKeyDown(UINT8 key);
virtual void OnKeyUp(UINT8 key);
virtual void OnMouseDown(WPARAM btnState, int x, int y);
virtual void OnMouseMove(WPARAM btnState, int x, int y);
private:
static const UINT FrameCount = 3;
static const UINT CityRowCount = 9;
static const UINT CityColumnCount = 9;
static const UINT CityMaterialCount = CityRowCount * CityColumnCount;
static const UINT CityMaterialTextureWidth = 64;
static const UINT CityMaterialTextureHeight = 64;
static const UINT CityMaterialTextureChannelCount = 4;
static const bool UseBundles = false; // Test VRS with bundles
static const float CitySpacingInterval;
// Pipeline objects.
CD3DX12_VIEWPORT m_viewport;
CD3DX12_RECT m_scissorRect;
ComPtr<IDXGISwapChain3> m_swapChain;
ComPtr<ID3D12Device> m_device;
ComPtr<ID3D12Resource> m_renderTargets[FrameCount];
ComPtr<ID3D12Resource> m_depthStencil;
ComPtr<ID3D12CommandAllocator> m_commandAllocator;
ComPtr<ID3D12CommandQueue> m_commandQueue;
ComPtr<ID3D12RootSignature> m_rootSignature;
ComPtr<ID3D12DescriptorHeap> m_rtvHeap;
ComPtr<ID3D12DescriptorHeap> m_cbvSrvHeap;
ComPtr<ID3D12DescriptorHeap> m_dsvHeap;
ComPtr<ID3D12DescriptorHeap> m_samplerHeap;
ComPtr<ID3D12PipelineState> m_pipelineState;
ComPtr<ID3D12QueryHeap> m_gpuTimerQuery = nullptr;
ComPtr<ID3D12Resource> m_gpuTimerBuffer = nullptr;
#ifdef VRS_ENABLED
ComPtr<ID3D12PipelineState> m_pipelineStateViz;
// Must use ID3D12GraphicsCommandList5 for RSSetShadingRate
ComPtr<ID3D12GraphicsCommandList5> m_commandList;
#else
ComPtr<ID3D12GraphicsCommandList> m_commandList;
#endif
// App resources.
UINT m_numIndices;
ComPtr<ID3D12Resource> m_vertexBuffer;
ComPtr<ID3D12Resource> m_indexBuffer;
ComPtr<ID3D12Resource> m_cityDiffuseTexture;
ComPtr<ID3D12Resource> m_cityNormalTexture;
ComPtr<ID3D12Resource> m_cityMaterialTextures[CityMaterialCount];
D3D12_VERTEX_BUFFER_VIEW m_vertexBufferView;
D3D12_INDEX_BUFFER_VIEW m_indexBufferView;
StepTimer m_timer;
UINT m_cbvSrvDescriptorSize;
UINT m_rtvDescriptorSize;
SimpleCamera m_camera;
// Frame resources.
std::vector<FrameResource*> m_frameResources;
FrameResource* m_pCurrentFrameResource;
UINT m_currentFrameResourceIndex;
// Synchronization objects.
UINT m_frameIndex;
UINT m_frameCounter;
HANDLE m_fenceEvent;
ComPtr<ID3D12Fence> m_fence;
UINT64 m_fenceValue;
UINT m_currentFrame = 1;
float m_clearColor[4]{ 0.0f, 0.2f, 0.4f, 1.0f };
void LoadPipeline();
void LoadAssets();
void LoadImGui();
void CreateFrameResources();
void PopulateCommandList(FrameResource* pFrameResource);
// Timer Queries
void LoadTimer();
void ReadTimer();
void DestroyTimer();
XMFLOAT2 m_framerateOrigin;
wchar_t m_framerateBuffer[512];
uint64_t m_startTime = 0;
uint64_t m_endTime = 0;
uint64_t m_GPUClocksPerSecond = 0;
uint64_t *m_timeStampBuffer = nullptr; //just two slots to hold our per frame start and stop time
double m_totalFrameTime = 0;
double m_currentFrameTime = 0;
double m_GPUSecondsPerClock = 0;
    const char* m_shadingRates[6]{ "1x1 [2] Blue Viz", "1x2 [3] Teal Viz", "2x1 [4] Yellow Viz", "2x2 [5] Green Viz", "4x4 [6] Red Viz", "Distance based [7]" };
bool m_showGUI = true;
bool m_showFPS = true;
bool m_showMetrics = true;
bool m_showVRSVisualizer = true;
bool m_animate = true;
float m_offsetX = 0.0f;
D3D12_SHADING_RATE m_shadingRateValues[5]
{
D3D12_SHADING_RATE_1X1,
D3D12_SHADING_RATE_1X2,
D3D12_SHADING_RATE_2X1,
D3D12_SHADING_RATE_2X2,
D3D12_SHADING_RATE_4X4,
};
int m_currentShadingRate = D3D12_SHADING_RATE_1X1;
#ifdef VRS_ENABLED
bool m_enableVRS = true;
bool m_VRSTier1Enabled = true;
#else
bool m_enableVRS = false;
bool m_VRSTier1Enabled = false;
#endif
enum Descriptors
{
#ifdef FONTS_ENABLED
MyFont,
#endif
#ifdef IMGUI_ENABLED
Imgui,
#endif
Count
};
};
<file_sep>/README.md

Demonstrates Gen11 Tier 1 VRS capabilities with DirectX 12
## Keyboard Commands
* W,A,S,D: Move Camera
* Left/Right Arrow: Camera Yaw
* Up/Down Arrow: Camera Pitch
* Space: Toggle Animation
* C: Toggle Visualization Colors
* R: Reset Camera
* 1: Toggle VRS
* 2: 1x1 Shading Rate
* 3: 1x2 Shading Rate
* 4: 2x1 Shading Rate
* 5: 2x2 Shading Rate
* 6: 4x4 Shading Rate
* 7: Distance Based Shading Rate
## Requirements
* Visual Studio 2017
* Windows 10 Insider Preview 19H1 Build 18334 or Greater
* Windows 10 Insider Preview 19H1 Build 18334 or Greater SDK
* VRS Capable Windows Driver
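## Command Line Options
A couple of optional flags are parsed at startup (see `Main.cpp` and `DXSample.cpp` for the exact behavior):
* -disableVRS: Start the sample with variable rate shading disabled
* -warp: Use the WARP software adapter instead of a hardware adapter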
<file_sep>/src/FrameResource.h
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
//*********************************************************
//
// Copyright 2019 Intel Corporation
//
// Permission is hereby granted, free of charge, to any
// person obtaining a copy of this software and associated
// documentation files(the "Software"), to deal in the Software
// without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to
// whom the Software is furnished to do so, subject to the
// following conditions :
// The above copyright notice and this permission notice shall
// be included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT.IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
//
//*********************************************************
#pragma once
#include "DXSampleHelper.h"
#define ANIMATION_ENABLED
#define FONTS_ENABLED
#define IMGUI_ENABLED
#define VRS_ENABLED
using namespace DirectX;
using Microsoft::WRL::ComPtr;
class FrameResource
{
private:
void SetCityPositions(FLOAT intervalX, FLOAT intervalZ);
    void SetCityShadingRates();
public:
struct SceneConstantBuffer
{
XMFLOAT4X4 mvp; // Model-view-projection (MVP) matrix.
XMFLOAT4 offset;
INT shadingRate;
FLOAT enableViz;
FLOAT padding[42];
};
ComPtr<ID3D12CommandAllocator> m_commandAllocator;
ComPtr<ID3D12CommandAllocator> m_bundleAllocator;
ComPtr<ID3D12GraphicsCommandList> m_bundle;
ComPtr<ID3D12Resource> m_cbvUploadHeap;
SceneConstantBuffer* m_pConstantBuffers;
UINT64 m_fenceValue;
std::vector<XMFLOAT4X4> m_modelMatrices;
std::vector<UINT> m_modelShadingRates;
UINT m_cityRowCount;
UINT m_cityColumnCount;
UINT m_cityMaterialCount;
float g_fNearDistance = 80.0f; //used for determining how near/far camera is from objects
float g_fFarDistance = 140.0f;
FrameResource(ID3D12Device* pDevice, UINT cityRowCount, UINT cityColumnCount, UINT cityMaterialCount, float citySpacingInterval);
~FrameResource();
void InitBundle(ID3D12Device* pDevice, ID3D12PipelineState* pPso,
UINT frameResourceIndex, UINT numIndices, D3D12_INDEX_BUFFER_VIEW* pIndexBufferViewDesc, D3D12_VERTEX_BUFFER_VIEW* pVertexBufferViewDesc,
ID3D12DescriptorHeap* pCbvSrvDescriptorHeap, UINT cbvSrvDescriptorSize, ID3D12DescriptorHeap* pSamplerDescriptorHeap, ID3D12RootSignature* pRootSignature);
void PopulateCommandList(ID3D12GraphicsCommandList* pCommandList, ID3D12PipelineState* pPso,
UINT frameResourceIndex, UINT numIndices, D3D12_INDEX_BUFFER_VIEW* pIndexBufferViewDesc, D3D12_VERTEX_BUFFER_VIEW* pVertexBufferViewDesc,
ID3D12DescriptorHeap* pCbvSrvDescriptorHeap, UINT cbvSrvDescriptorSize, ID3D12DescriptorHeap* pSamplerDescriptorHeap, ID3D12RootSignature* pRootSignature);
void XM_CALLCONV UpdateConstantBuffers(FXMMATRIX view, CXMMATRIX projection, float offsetX, bool vrs, int shadingRate, bool viz);
};
<file_sep>/src/Main.cpp
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
//*********************************************************
//
// Copyright 2019 Intel Corporation
//
// Permission is hereby granted, free of charge, to any
// person obtaining a copy of this software and associated
// documentation files(the "Software"), to deal in the Software
// without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to
// whom the Software is furnished to do so, subject to the
// following conditions :
// The above copyright notice and this permission notice shall
// be included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT.IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
//
//*********************************************************
#include "stdafx.h"
#include "D3D12VariableRateShading.h"
//_Use_decl_annotations_
int WINAPI WinMain(HINSTANCE hInstance, HINSTANCE, LPSTR sCommandLine, int nCmdShow)
{
// Enable run-time memory check for debug builds.
#if defined(DEBUG) | defined(_DEBUG)
_CrtSetDbgFlag(_CRTDBG_ALLOC_MEM_DF | _CRTDBG_LEAK_CHECK_DF);
#endif
int numArgs = 0;
auto cmdLine = CommandLineToArgvW(GetCommandLineW(), &numArgs);
bool bDisableVRS = false;
if (cmdLine == NULL)
{
MessageBox(NULL, L"Unable to parse command line.", L"Error", MB_OK);
return -1;
}
for (int i = 0; i < numArgs; i++)
{
if (!wcscmp(cmdLine[i], L"-disableVRS"))
{
bDisableVRS = true;
}
    }
    LocalFree(cmdLine);
D3D12VariableRateShading sample(1280, 720, L"Intel D3D12 Variable Rate Shading Sample", bDisableVRS);
return Win32Application::Run(&sample, hInstance, nCmdShow);
}
<file_sep>/src/Win32Application.cpp
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
//*********************************************************
//
// Copyright 2019 Intel Corporation
//
// Permission is hereby granted, free of charge, to any
// person obtaining a copy of this software and associated
// documentation files(the "Software"), to deal in the Software
// without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to
// whom the Software is furnished to do so, subject to the
// following conditions :
// The above copyright notice and this permission notice shall
// be included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT.IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
//
//*********************************************************
#include "stdafx.h"
#include "Win32Application.h"
#include "Windowsx.h"
HWND Win32Application::m_hwnd = nullptr;
int Win32Application::Run(DXSample* pSample, HINSTANCE hInstance, int nCmdShow)
{
// Parse the command line parameters
int argc;
LPWSTR* argv = CommandLineToArgvW(GetCommandLineW(), &argc);
pSample->ParseCommandLineArgs(argv, argc);
LocalFree(argv);
// Initialize the window class.
WNDCLASSEX windowClass = { 0 };
windowClass.cbSize = sizeof(WNDCLASSEX);
windowClass.style = CS_HREDRAW | CS_VREDRAW;
windowClass.lpfnWndProc = WindowProc;
windowClass.hInstance = hInstance;
windowClass.hCursor = LoadCursor(NULL, IDC_ARROW);
windowClass.lpszClassName = L"DXSampleClass";
RegisterClassEx(&windowClass);
RECT windowRect = { 0, 0, static_cast<LONG>(pSample->GetWidth()), static_cast<LONG>(pSample->GetHeight()) };
AdjustWindowRect(&windowRect, WS_OVERLAPPEDWINDOW, FALSE);
// Create the window and store a handle to it.
m_hwnd = CreateWindow(
windowClass.lpszClassName,
pSample->GetTitle(),
WS_OVERLAPPEDWINDOW,
CW_USEDEFAULT,
CW_USEDEFAULT,
windowRect.right - windowRect.left,
windowRect.bottom - windowRect.top,
nullptr, // We have no parent window.
nullptr, // We aren't using menus.
hInstance,
pSample);
// Initialize the sample. OnInit is defined in each child-implementation of DXSample.
pSample->OnInit();
ShowWindow(m_hwnd, nCmdShow);
// Main sample loop.
MSG msg = {};
while (msg.message != WM_QUIT)
{
if (::PeekMessage(&msg, NULL, 0U, 0U, PM_REMOVE))
{
::TranslateMessage(&msg);
::DispatchMessage(&msg);
continue;
}
}
pSample->OnDestroy();
// Return this part of the WM_QUIT message to Windows.
return static_cast<char>(msg.wParam);
}
extern LRESULT ImGui_ImplWin32_WndProcHandler(HWND hWnd, UINT msg, WPARAM wParam, LPARAM lParam);
// Main message handler for the sample.
LRESULT CALLBACK Win32Application::WindowProc(HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam)
{
DXSample* pSample = reinterpret_cast<DXSample*>(GetWindowLongPtr(hWnd, GWLP_USERDATA));
if (ImGui_ImplWin32_WndProcHandler(hWnd, message, wParam, lParam))
return true;
switch (message)
{
case WM_CREATE:
{
// Save the DXSample* passed in to CreateWindow.
LPCREATESTRUCT pCreateStruct = reinterpret_cast<LPCREATESTRUCT>(lParam);
SetWindowLongPtr(hWnd, GWLP_USERDATA, reinterpret_cast<LONG_PTR>(pCreateStruct->lpCreateParams));
}
return 0;
case WM_KEYDOWN:
if (pSample)
{
pSample->OnKeyDown(static_cast<UINT8>(wParam));
}
return 0;
case WM_KEYUP:
if (pSample)
{
pSample->OnKeyUp(static_cast<UINT8>(wParam));
}
return 0;
case WM_LBUTTONDOWN:
case WM_MBUTTONDOWN:
case WM_RBUTTONDOWN:
if (pSample)
{
pSample->OnMouseDown(wParam, GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam));
}
return 0;
case WM_LBUTTONUP:
case WM_MBUTTONUP:
case WM_RBUTTONUP:
if (pSample)
{
pSample->OnMouseUp(wParam, GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam));
}
return 0;
case WM_MOUSEMOVE:
if (pSample)
{
pSample->OnMouseMove(wParam, GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam));
}
return 0;
case WM_PAINT:
if (pSample)
{
pSample->OnUpdate();
pSample->OnRender();
}
return 0;
case WM_DESTROY:
PostQuitMessage(0);
return 0;
}
// Handle any messages the switch statement didn't.
return DefWindowProc(hWnd, message, wParam, lParam);
}
| 928ee5d07a79fc30e3fa24a5f8f6a798ab55d722 | [
"Markdown",
"C++",
"INI"
] | 9 | C++ | GameTechDev/D3D12VariableRateShading | c8d498b3c0eefa29274de278571021b6abefca33 | e11f3483542b43751de22c000338558a720ab488 | |
refs/heads/master | <repo_name>HyoukSunKwon/WEB<file_sep>/MovieSearch/script.js
// ajax
var xhr = new XMLHttpRequest();
window.addEventListener("load", function(){
var movieurl = "https://api.themoviedb.org/3/movie/now_playing?api_key=<KEY>&language=en-US&page=1";
xhr.open("GET", movieurl);
xhr.send();
xhr.addEventListener("readystatechange", currentlyMovies);
});
var div = document.querySelector("#result");
var response;
var id;
var title;
// Get the movie information from the API response
function currentlyMovies (){
if (xhr.readyState == 4){
response= JSON.parse(xhr.responseText);
var picture = "https://image.tmdb.org/t/p/w185_and_h278_bestv2/";
for(var i=0; i< response.results.length ; i++) {
//create div
var divEachItem = document.createElement("div");
divEachItem.setAttribute("id", "item");
var divImg = document.createElement("div");
divImg.setAttribute("id", "imgMovie");
var divMovieInfo = document.createElement("div");
divMovieInfo.setAttribute("id", "inforMovie");
//Image of Movie
var mImg = document.createElement("a");
mImg.setAttribute("onclick", "localstorage(this);");
mImg.setAttribute("id", response.results[i].id);
//aImg.setAttribute("")
var movieImg = document.createElement("img");
movieImg.setAttribute("src", picture + response.results[i].poster_path);
mImg.appendChild(movieImg);
divImg.appendChild(mImg);
//Information of Movie (title, vote_average, overview, release_date)
var movieInfor = document.createElement("p");
movieInfor.innerHTML += "<b>" + response.results[i].title + "</b>";
movieInfor.innerHTML += "<br> Rating: " + response.results[i].vote_average * 10 + "%";
movieInfor.innerHTML += "<br>" + response.results[i].overview;
movieInfor.innerHTML += "<br> Release date: " + response.results[i].release_date;
divMovieInfo.appendChild(movieInfor);
divEachItem.appendChild(divImg);
divEachItem.appendChild(divMovieInfo);
div.appendChild(divEachItem);
}
}
}
function getInfoDetail(id){
for( var i = 0 ; i < response.results.length; i++)
if(response.results[i].id == id)
return response.results[i];
}
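// Persist the clicked movie's id and details to localStorage, then navigate to the detail page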
function localstorage(e){
localStorage.setItem("movieImgD", e.id);
localStorage.setItem('detailInfo', JSON.stringify(getInfoDetail(e.id)));
location.href = 'detail.html';
}
<file_sep>/MovieSearch/scriptDetail.js
var movieImgD = localStorage.getItem("movieImgD");
console.log(movieImgD);
var movieInfoD = JSON.parse(localStorage.getItem("detailInfo"));
var div = document.querySelector("#detail");
var key = "<KEY>";
var xhr = new XMLHttpRequest();
window.addEventListener("load", function(){
var urlMovieDetail = "https://api.themoviedb.org/3/movie/" + movieImgD + "/videos?api_key=" + key;
console.log(urlMovieDetail);
xhr.open("GET", urlMovieDetail);
xhr.send();
xhr.addEventListener("readystatechange", detail);
});
function detail(){
if (xhr.readyState == 4){
var response = JSON.parse(xhr.responseText);
var divEachItem = document.createElement("div");
divEachItem.setAttribute("id", "item");
var divImg = document.createElement("div");
divImg.setAttribute("id", "imgMovie");
var divMovieInfo = document.createElement("div");
divMovieInfo.setAttribute("id", "inforMovie");
//Image of Movie
var mImg = document.createElement("a");
mImg.setAttribute("onclick", "localstorage(this);");
mImg.setAttribute("id", movieInfoD.id);
//aImg.setAttribute("")
var picture = "https://image.tmdb.org/t/p/w185_and_h278_bestv2/";
var movieImg = document.createElement("img");
movieImg.setAttribute("src", picture + movieInfoD.poster_path);
mImg.appendChild(movieImg);
divImg.appendChild(mImg);
//Information of Movie (title, vote_average, overview, release_date)
var movieInfor = document.createElement("p");
movieInfor.innerHTML += "<b>" + movieInfoD.title + "</b>";
movieInfor.innerHTML += "<br> Rating: " + movieInfoD.vote_average * 10 + "%";
movieInfor.innerHTML += "<br>" + movieInfoD.overview;
movieInfor.innerHTML += "<br> Release date: " + movieInfoD.release_date;
divMovieInfo.appendChild(movieInfor);
divEachItem.appendChild(divImg);
divEachItem.appendChild(divMovieInfo);
div.appendChild(divEachItem);
// //Trailer
var divTrailer = document.createElement("div");
divTrailer.setAttribute("class", "trailer");
for (var i=0;i<response.results.length;i++){
if (response.results[i].type == "Trailer"){
console.log(response.results[i].key);
divTrailer.innerHTML += '<iframe width="420" height="345" src="https://www.youtube.com/embed/' + response.results[i].key + '"></iframe>';
}
}
div.appendChild(divTrailer);
}
} | 8eb4ff6c4f717a202aedc444ded5f04144f24051 | [
"JavaScript"
] | 2 | JavaScript | HyoukSunKwon/WEB | 1b079eaad7a674e5521757bcd3c7fd3950ee02a3 | 5362dc69d008709cda120c8300714c5f43308289 | |
refs/heads/main | <file_sep>import './App.css';
import Header from "./components/Header";
import Main from "./components/Main";
import {BrowserRouter as Router, Route, Switch} from "react-router-dom";
import { Helmet } from "react-helmet";
function App() {
return (
<div className="App">
<Router>
<Switch>
<Route path='/'>
<Helmet>
<title>Card matching game</title>
</Helmet>
<Header />
<Main />
</Route>
</Switch>
</Router>
</div>
);
}
export default App;
<file_sep># sx-homework
<file_sep>import React, {useEffect, useState} from "react";
import styled from "styled-components";
import Card from "./Card";
import {images} from "../data/cards";
const GameArea = () => {
const [cards, setCards] = useState([]);
const [openCards, setOpenCards] = useState([]);
    const duplicateImages = () => {
        // Duplicate the deck to create pairs and shuffle once, so card positions stay stable across re-renders
        setCards([...images, ...images].sort(() => Math.random() - 0.5));
    }
useEffect(() => {
duplicateImages();
}, []);
return(
<Container>
{
                cards.map((card, index) => (
<Card
key={index}
card={card}
/>
))
}
</Container>
)
}
export default GameArea;
const Container = styled.div`
width: 50%;
display: flex;
flex-wrap: wrap;
row-gap: 10px;
column-gap: 10px;
align-items: center;
justify-content: space-between;
`
<file_sep>import React from "react";
import styled from "styled-components";
import Display from "./Display";
import GameArea from "./GameArea";
const Main = () => {
return(
<Container>
<Display />
<GameArea />
</Container>
)
}
export default Main;
const Container = styled.div`
display: flex;
flex-direction: column;
align-items: center;
`
<file_sep>import React from "react";
import styled from "styled-components";
import { useHistory } from 'react-router-dom';
const Header = () => {
const history = useHistory();
return(
<Container>
<Logo src={require(`../images/splendex-logo.svg`).default} onClick={() => {history.push('/')}} />
</Container>
)
}
export default Header;
const Container = styled.div`
height: 60px;
width: 100%;
background: black;
display: flex;
align-items: center;
`
const Logo = styled.img`
height: 50px;
background: transparent;
margin-left: 10px;
cursor: pointer;
`
const NewGame = styled.div`
background: red;
color: white;
border: 2px solid black;
border-radius: 4px;
height: 40px;
display: flex;
justify-content: center;
align-items: center;
opacity: 0.85;
text-transform: uppercase;
cursor: pointer;
padding: 2px 8px;
font-weight: 600;
`
<file_sep>import angular from '../images/angular.png';
import d3 from '../images/d3.png';
import jenkins from '../images/jenkins.png';
import postcss from '../images/postcss.png';
import react from '../images/react.png';
import redux from '../images/redux.png';
import sass from '../images/sass.png';
import splendex from '../images/splendex.png';
import ts from '../images/ts.png';
import webpack from '../images/webpack.png';
export const images = [
{
name: "angular",
pic: angular,
isFlipped: false
},
{
name: "d3",
pic: d3,
isFlipped: false
},
{
name: "jenkins",
pic: jenkins,
isFlipped: false
},
{
name: "postcss",
pic: postcss,
isFlipped: false
},
{
name: "react",
pic: react,
isFlipped: false
},
{
name: "redux",
pic: redux,
isFlipped: false
},
{
name: "sass",
pic: sass,
isFlipped: false
},
{
name: "splendex",
pic: splendex,
isFlipped: false
},
{
name: "ts",
pic: ts,
isFlipped: false
},
{
name: "webpack",
pic: webpack,
isFlipped: false
},
]
<file_sep>import React from "react";
import styled from "styled-components";
import { useSelector, useDispatch } from "react-redux";
const Display = () => {
const dispatch = useDispatch();
const tries = useSelector(state => state.displayReducer.tries);
const best = useSelector(state => state.displayReducer.best);
const restart = () => {
return dispatch({type: 'RESTART'});
}
return(
<Container>
<CurrentTries>Current tries: {tries}</CurrentTries>
<Best>Best: {best}</Best>
<Restart onClick={restart}>RESTART</Restart>
</Container>
)
}
export default Display;
const Container = styled.div`
margin-top: 10px;
margin-bottom: 10px;
width: 50%;
display: flex;
justify-content: space-between;
align-items: center;
font-size: 20px;
`
const CurrentTries = styled.p`
`
const Best = styled.p`
`
const Restart = styled.div`
border: 2px solid black;
border-radius: 4px;
height: 40px;
display: flex;
justify-content: center;
align-items: center;
opacity: 0.85;
text-transform: uppercase;
cursor: pointer;
padding: 2px 8px;
font-weight: 600;
`
| 61c16ef88e309824386f0f4cb3abf440df14b40b | [
"JavaScript",
"Markdown"
] | 7 | JavaScript | daanworks/sx-homework | 3a53465131f8f44e4f3786b1510f05d55587040b | fa291f2433f987e4934f34a59d958ad8bd7c6ef5 | |
refs/heads/master | <repo_name>VickyMutai/chat-app<file_sep>/README.md
# Chat App api
### Prerequisites
###### Requirements
Python 3.6.3
### Installing
Create virtual
- python3.6 -m venv virtual
Activate virtual
- source virtual/bin/activate
#### Install all the requirements
- pip install -r requirements.txt
#### Databases
For PostgreSQL users:
* Go to settings and change the user, password and name (database name) in the `DATABASES` setting.

For SQLite users:
* Go to settings and replace the `DATABASES` section with the following:
```
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
```
### Make migrations
- python manage.py makemigrations
- python manage.py migrate
#### Run application
- python manage.py runserver
#### Testing API endpoints
##### Authentication
For this functionality we used [Djoser](http://djoser.readthedocs.io/en/latest/introduction.html), a REST implementation of the Django authentication system. It handles all the auth/ endpoints.
Example
Creating a new user
```
$ curl -X POST http://127.0.0.1:8000/auth/users/create/ --data 'username=mike&password=<PASSWORD>'
{"email": "", "username": "mike", "id": 1}
```
To access the user details
```
$ curl -X GET http://127.0.0.1:8000/auth/me/
{"detail": "Authentication credentials were not provided."}
```
The error is because we didn't provide an authentication token, which this endpoint and all app/ endpoints require. To generate a token we do
```
$ curl -X POST http://127.0.0.1:8000/auth/token/create/ --data 'username=mike&password=<PASSWORD>'
{"auth_token":"<KEY>"}
```
We can now use the token for all the other endpoints. Running the same request again:
```
$ curl -X GET http://127.0.0.1:8000/auth/me/ -H 'Authorization: Token <KEY>'
{"email":"","id":4,"username":"mike"}
```
##### Chat
Aside from the auth/ endpoints which handle authentification. We have two main API views
- ChatView. Accepts POST and PATCH requests. For creating chat groups/rooms
- ChatMessageView. Accepts GET and POST requests. For sending and getting messages to/from a chat group.
Using the user we created and token given we can create a chat group and send messages to it.
Creating a chat group:
```
curl -X POST http://127.0.0.1:8000/app/chats/ -H 'Authorization: Token <KEY>'
{"message":"New room created","uri":"bf9116670fae470","status":"SUCCESS"}
```
The uri is a randomly generated string that uniquely identifies each chat room. To get messages from the chat room
```
curl -X GET http://127.0.0.1:8000/app/chats/bf9116670fae470/messages -H 'Authorization: Token <KEY>'
```
It doesn't return anything since no messages have been sent to that particular chat room. To send a message
```
$ curl -X POST http://127.0.0.1:8000/app/chats/bf9116670fae470/messages/ -H 'Authorization: Token bc968ac5c0410b3e83b81805d804438a5c2425d3' --data message="The force is strong"
{"message":"The force is strong","user":{"id":4,"username":"mike","email":""},"uri":"bf9116670fae470","status":"SUCCESS"}
```
And another one
```
curl -X POST http://127.0.0.1:8000/app/chats/bf9116670fae470/messages/ -H 'Authorization: Token <KEY>' --data message="Phantom Menace was the best"
{"message":"Phantom Menace was the best","user":{"id":4,"username":"mike","email":""},"uri":"bf9116670fae470","status":"SUCCESS"}
```
Using the uri for the chat room we can get all the messages sent there like so
```
curl -X GET http://127.0.0.1:8000/app/chats/bf9116670fae470/messages -H 'Authorization: Token <KEY>'
{"messages":[{"user":{"id":4,"username":"mike","email":""},"message":"The force is strong"},{"user":{"id":4,"username":"mike","email":""},"message":"Phantom Menace was the best"}],"id":10,"uri":"bf9116670fae470"}
```
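The API also exposes a PATCH endpoint for adding another user to an existing chat room (handled by `ChatView.patch`). A minimal sketch, assuming a second user named `jane` has already been created and reusing the token and room uri from above:
```
$ curl -X PATCH http://127.0.0.1:8000/app/chats/bf9116670fae470/ -H 'Authorization: Token <KEY>' --data 'username=jane'
```
The response lists the room admin and members and confirms that the user joined the chat.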
<file_sep>/app/views.py
from django.shortcuts import render
from django.contrib.auth import get_user_model
from .models import Chat, ChatMember, ChatMessage, deserialize_user
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import permissions
# Create your views here.
class ChatView(APIView):
"""Manage Chat rooms."""
permission_classes = (permissions.IsAuthenticated,)
def post(self, request, *args, **kwargs):
"""create a new chat session."""
user = request.user
chat = Chat.objects.create(admin=user)
context = {
'status':'SUCCESS',
'uri':chat.uri,
'message':'New room created'
}
return Response(context)
def patch(self,request,*args,**kwargs):
"""Add user to chat session."""
User = get_user_model()
uri = kwargs['uri']
username = request.data['username']
user = User.objects.get(username=username)
chat=Chat.objects.get(uri=uri)
admin = chat.admin
if admin != user:
chat.members.get_or_create(user=user,chat=chat)
admin = deserialize_user(admin)
members = [ deserialize_user(chat.user) for chat in chat.members.all()]
members.insert(0,admin)
context = {
'status':'SUCCESS',
'members':members,
'message':'%s joined that chat' %user.username,
'user':deserialize_user(user)
}
return Response(context)
class ChatMessageView(APIView):
"""Create/Get Chat session messages."""
permission_classes = (permissions.IsAuthenticated,)
def get(self,request,*args,**kwargs):
""""Gets messages from a chat session"""
uri = kwargs['uri']
chat = Chat.objects.get(uri=uri)
messages = [chat_message.convert() for chat_message in chat.messages.all()]
context = {
'id':chat.id,
'uri':chat.uri,
'messages':messages,
}
return Response(context)
def post(self,request,*args,**kwargs):
"""create a new message in a chat session."""
uri = kwargs['uri']
message = request.data['message']
user = request.user
chat= Chat.objects.get(uri=uri)
ChatMessage.objects.create(user=user, chat=chat, message=message)
context={
'status': 'SUCCESS',
'uri': chat.uri,
'message': message,
'user': deserialize_user(user)
}
return Response (context)
<file_sep>/app/urls.py
from django.conf import settings
from django.conf.urls import url
from django.conf.urls.static import static
from . import views
urlpatterns=[
url(r'chats/$', views.ChatView.as_view()),
url(r'chats/(?P<uri>\w+)/$', views.ChatView.as_view()),
url(r'chats/(?P<uri>\w+)/messages/$', views.ChatMessageView.as_view()),
]<file_sep>/app/migrations/0002_auto_20180430_1530.py
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-04-30 12:30
from __future__ import unicode_literals
import app.models
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('app', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Chat',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('create', models.DateTimeField(auto_now_add=True)),
('update', models.DateTimeField(auto_now=True)),
('uri', models.URLField(default=app.models._generate_unique_uri)),
('admin', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.RenameModel(
old_name='ChatSessionMember',
new_name='ChatMember',
),
migrations.RenameModel(
old_name='ChatSessionMessage',
new_name='ChatMessage',
),
migrations.RemoveField(
model_name='chatsession',
name='user',
),
migrations.RemoveField(
model_name='chatmember',
name='chat_session',
),
migrations.RemoveField(
model_name='chatmessage',
name='chat_session',
),
migrations.DeleteModel(
name='ChatSession',
),
migrations.AddField(
model_name='chatmember',
name='chat',
field=models.ForeignKey(default=None, on_delete=django.db.models.deletion.PROTECT, related_name='members', to='app.Chat'),
preserve_default=False,
),
migrations.AddField(
model_name='chatmessage',
name='chat',
field=models.ForeignKey(default=None, on_delete=django.db.models.deletion.PROTECT, related_name='messages', to='app.Chat'),
preserve_default=False,
),
]
| 88de8d292508fb8db0097a9aba8f9222d4de884b | [
"Markdown",
"Python"
] | 4 | Markdown | VickyMutai/chat-app | e30201e93e42e334f20708463447c5cc8df51fb8 | 07e8b0b460a74f2a4163824737532f117892def4 | |
refs/heads/master | <repo_name>Granitosaurus/matchtick.com<file_sep>/ggmtgg/__init__.py
from flask import Flask
from flask_redis import FlaskRedis
from flask_limiter import Limiter
from flask_limiter.util import get_remote_address
app = Flask(__name__)
app.config.from_pyfile('config.py')
redis = FlaskRedis(app, decode_responses=True)
limiter = Limiter(app, key_func=get_remote_address)
import ggmtgg.views
import ggmtgg.updater
<file_sep>/ggmtgg/updater.py
import json
from ggmt.matchticker import GosuTicker
from ggmt.tournament import LiquidBracketDownloader, EVENT_CURRENT, EVENT_FUTURE, EVENT_PAST
from redis import Redis
from ggmtgg import app
@app.cli.command('update-matchticker')
def update_matchticker():
r = Redis()
for game in GosuTicker.games:
print(f'updating matchticker {game}')
g = GosuTicker(game)
value = list(g.download_matches())
r.delete(f'ggmt_tick_{game}')
r.set(f'ggmt_tick_{game}', json.dumps(value))
@app.cli.command('update-tournaments')
def update_tournaments():
r = Redis()
for cat in [EVENT_FUTURE, EVENT_CURRENT, EVENT_PAST]:
for game in LiquidBracketDownloader.games:
print(f'updating tournament {game}_{cat.lower()}')
dl = LiquidBracketDownloader(game)
value = dl.find_tournaments(cat)
r.delete(f'ggmt_tournament_{game}_{cat.lower()}')
r.set(f'ggmt_tournament_{game}_{cat.lower()}', json.dumps(value))
if __name__ == '__main__':
update_tournaments()
update_matchticker()
<file_sep>/ggmtgg/views.py
import json
from flask import render_template, request, redirect, url_for, g, session, abort
from jinja2 import TemplateNotFound
from ggmtgg import app, redis, config, limiter
from ggmt.tournament import LiquidBracketDownloader
from ggmt.matchticker import GosuTicker
def error(reason, suggestion=None):
data = {
'error': reason,
'suggestion': suggestion or ''
}
return json.dumps(data)
api_limit = limiter.shared_limit("100/hour", "api", error_message=error('Exceeded 100/hour api call limit'))
@app.before_first_request
def make_session_permanent():
session.permanent = True
@app.route('/')
def index():
return render_template('index.html')
@app.route('/tick/<string:game>')
def tick(game):
data = redis.get(f'ggmt_tick_{game}')
data = json.loads(data)
return render_template('tick.html', data=data)
@app.route('/api/tick/<string:game>')
@api_limit
def api_tick(game):
if game not in GosuTicker.games:
return error(f'unknown_game;choose from: {GosuTicker.games}')
return redis.get(f'ggmt_tick_{game}')
@app.route('/api/tournament/<string:game>/<string:time>')
@app.route('/api/tournament/<string:game>', defaults={'time': 'all'})
@app.route('/api/tournament', defaults={'time': None, 'game': None})
@api_limit
def api_tournament(game, time):
if game not in LiquidBracketDownloader.games:
return error(f'unknown_game;choose from: {LiquidBracketDownloader.games}',
suggestion='/tournament/game')
times = config.TOURNAMENT_TIMES + ['all']
if time not in times:
return error(f'unknown_time;choose from: {times}')
if time != 'all':
return redis.get(f'ggmt_tournament_{game}_{time}')
else:
items = [json.loads(redis.get(f'ggmt_tournament_{game}_{i}')) for i in config.TOURNAMENT_TIMES]
items = [tour for time in items for tour in time]
return json.dumps(items, ensure_ascii=False)
@app.route('/<string:page_name>')
def static_page(page_name):
try:
return render_template('{}.html'.format(page_name))
except TemplateNotFound:
return abort(404)
<file_sep>/setup.py
from distutils.core import setup
setup(
name='ggmt.gg',
version='0.1',
packages=['ggmtgg'],
url='',
license='GPLv3',
author='granitosaurus',
author_email='',
description=''
)
<file_sep>/ggmtgg/config.py
# WTF_CSRF_ENABLED = True
from ggmt.matchticker import GosuTicker
from ggmt.tournament import EVENT_PAST, EVENT_FUTURE, EVENT_CURRENT
SECRET_KEY = 'very-secret'
REDIS_URL = "redis://:@localhost:6379/0"
# rate limiting
RATELIMIT_STORAGE_URL = "redis://:@localhost:6379/0"
RATELIMIT_HEADERS_ENABLED = True
# ggmt settings
GAMES = {
'Dota 2': 'dota2',
'CS:GO': 'counterstrike',
'Hearthstone': 'hearthstone',
'Heroes of the Storm': 'heroesofthestorm',
'League of Legends': 'lol',
'Overwatch': 'overwatch',
'Starcraft 2': 'starcraft2',
'All': 'all',
}
TOURNAMENT_TIMES = [t.lower() for t in [EVENT_PAST, EVENT_FUTURE, EVENT_CURRENT]]
| c500f0de300edcf87ac79f7c097802328510a68f | [
"Python"
] | 5 | Python | Granitosaurus/matchtick.com | e29a9668406d80115b611d189c6a0a0bbd6002a6 | 4e3684df19b0ef61e1f2f9a946b0d22c0da9f5c0 | |
refs/heads/master | <file_sep>using System;
using System.ComponentModel.Design;
using System.Runtime.InteropServices;
namespace Deliverable_2
{
class Program
{
static void Main(string[] args)
{
string name = "";
int activityChoice = 0;
string youShouldGo = "";
int numberOfPeople = 0;
string travelResult = "";
Console.WriteLine("Hello what is your name?");
name = Console.ReadLine();
Console.WriteLine(
"Hello {0} What are you in the mood for? Here are you options 1. Action 2. Chill Times 3. Danger 4. Good Food ",
name);
activityChoice = int.Parse(Console.ReadLine());
if (activityChoice == 1)
{
youShouldGo = "Stock Car Racing";
}
else if (activityChoice == 2)
{
youShouldGo = "Hiking";
}
else if (activityChoice == 3)
{
youShouldGo = "SkyDiving";
}
else if (activityChoice == 4)
{
youShouldGo = "Taco Bell";
}
else
{
Console.WriteLine("Incorrect response");
return;
}
Console.WriteLine("How many people are you bringing?");
numberOfPeople = int.Parse(Console.ReadLine());
if (numberOfPeople == 0)
{
travelResult = "sneakers";
}
else if (numberOfPeople >= 1 && numberOfPeople <= 4)
{
travelResult = "sedan";
}
else if (numberOfPeople >= 5 && numberOfPeople <= 10)
{
travelResult = "Volkswagen bus";
}
else if (numberOfPeople <= 11)
{
travelResult= "airplane";
}
else
{
Console.WriteLine("You have entered the wrong input, Please try again");
}
Console.WriteLine("Okay if you’re in the mood for {0}, then you should travel in a {1}!",youShouldGo, travelResult);
Console.WriteLine("Goodbye,{0}",name);
}
}
}
| 872f8fd59afdbfc4245e0facc38f55fd974c8597 | [
"C#"
] | 1 | C# | Lamarcross22/GrandCircus | 7a7a014906ed86ffb3a07ee2c53c8ac7d0302722 | 181332617b19f3fdba6e013ea12bb78a06b5970f | |
refs/heads/master | <file_sep># RandomView
仿网易星球悬浮效果,随机位置不重叠

<file_sep>package com.yuyan.randomview;
public interface OnRemoveListener {
void remove(RandomView randomView);
}
<file_sep>package com.yuyan.randomview;
import android.os.Bundle;
import android.os.Handler;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.View;
import android.view.ViewTreeObserver;
import android.view.Window;
import android.view.animation.Animation;
import android.view.animation.TranslateAnimation;
import android.widget.TextView;
public class MainActivity extends AppCompatActivity {
private RandomFrameLayout randomFrameLayout;
private TextView tvWait;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_home);
randomFrameLayout = (RandomFrameLayout) findViewById(R.id.fl_random);
tvWait = (TextView) findViewById(R.id.tv_wait);
randomFrameLayout.getViewTreeObserver().addOnGlobalLayoutListener(new ViewTreeObserver.OnGlobalLayoutListener() {
@Override
public void onGlobalLayout() {
randomFrameLayout.getViewTreeObserver().removeGlobalOnLayoutListener(this);
updateViewValue();
}
});
randomFrameLayout.setOnRemoveListener(new OnRemoveListener() {
@Override
public void remove(RandomView randomView) {
if (randomFrameLayout.getChildCount() == 2) {
tvWait.startAnimation(animation());
tvWait.setVisibility(View.VISIBLE);
}
}
});
}
public void updateViewValue() {
for (int i = 0; i < 10; i++) {
randomFrameLayout.updateView("0.0000" + i);
}
}
private TranslateAnimation animation() {
TranslateAnimation animation = new TranslateAnimation(0, 0, -15, 15);
animation.setRepeatCount(Animation.INFINITE);
animation.setDuration(1000);
animation.setRepeatMode(Animation.REVERSE);
return animation;
}
}
| 4ae7561fe1acf5eb2cece0ead8da4a8c33cbe80b | [
"Markdown",
"Java"
] | 3 | Markdown | 2038920059/RandomView | d6011a0aed883ae5344d4ffe26f70e05b1ff0a84 | a6b41b56faf93dbfc52ced15647eb6c47703f2fd | |
refs/heads/main | <file_sep># programming-task-plg
ProgrammingTask plugin generates a nice popup with some informative text to Joomla 4 users.
<file_sep><?php
/**
* @author <NAME>
* @version 1.0.0
*/
defined('_JEXEC') or die;
use Joomla\CMS\Plugin\CMSPlugin;
use Joomla\CMS\Language\Text;
/**
* ProgrammingTask Plugin.
*/
class PlgSystemProgrammingTask extends CMSPlugin
{
/**
* Load the language file on instantiation.
*
* @var boolean
* @since 3.9.0
*/
protected $autoloadLanguage = true;
/**
* Application object.
*
* @var \Joomla\CMS\Application\CMSApplication
* @since 3.8.0
*/
protected $app;
/**
* This event is triggered immediately before the framework has rendered the application.
*
* @return void
*/
public function onBeforeRender()
{
// Get the parameter values
$lifetime = $this->params->get('cookie_lifetime', 60) * 24 * 60 * 60;
$cookieName = $this->params->get("cookie_name", "is_visited_before");
/*
if($cookieName === '')
{
throw new \RuntimeException("ERROR_NO_COOKIE_NAME");
}
*/
// Get cookie
$cookieValue = $this->app->input->cookie->get($cookieName);
// Load assets
$this->app->getDocument()->getWebAssetManager()->registerAndUseScript(
'plg_system_programming_task',
'plg_system_programming_task/app.js',
[],
['defer' => true]
)->registerAndUseStyle(
'plg_system_programming_task',
'plg_system_programming_task/main.css'
);
// Check if a cookie is set.
if(!$cookieValue)
{
// Set cookie with value
$this->app->input->cookie->set(
$cookieName,
microtime(true),
time() + $lifetime,
$this->app->get('cookie_path', '/'),
$this->app->get('cookie_domain', ''),
$this->app->isHttpsForced(),
true
);
}
else
{
			$dateTime = date("m-d-Y H:i:s", (int) $cookieValue);
// Create the document.
$dom = new \DOMDocument('1.0', 'UTF-8');
// Create a new div element
$cookieBanner = $dom->createElement('div');
$cookieBanner->setAttribute('id', "myDIV");
$cookieBanner->setAttribute('class', "cookie-banner");
// Create a new div element and set the attributes.
$cookieBannerInner = $dom->createElement('div');
$cookieBannerInner->setAttribute('class', "cookie-banner-inner");
// Create a new div element and set the attributes.
$cookieBannerCopy = $dom->createElement('div');
$cookieBannerCopy->setAttribute('class', "cookie-banner-copy");
// Create a new div element and set the attributes.
$bannerHeader = Text::_('PLG_SYSTEM_PROGRAMING_TASK_COOKIE_BANNER_HEADER');
$cookieBannerHeader = $dom->createElement('div', $bannerHeader);
$cookieBannerHeader->setAttribute('class', "cookie-banner-header");
// Create a new div element and set the attributes.
$bannerDescription = Text::_('PLG_SYSTEM_PROGRAMING_TASK_COOKIE_BANNER_DESCRIPTION');
$cookieBannerDescription = $dom->createElement('div', $bannerDescription . $dateTime);
$cookieBannerDescription->setAttribute('class', "cookie-banner-description");
// Create a new div element and set the attributes.
$cookieBannerActions = $dom->createElement('div');
$cookieBannerActions->setAttribute('class', "cookie-banner-actions");
// Create a new button element and set the attributes.
$cookieBannerCta = $dom->createElement('button', 'OK');
$cookieBannerCta->setAttribute('class', "cookie-banner-cta");
$cookieBannerCta->setAttribute('onclick', "myFunction();");
// Append a cookieBannerInner in a cookieBanner
$cookieBanner->appendChild($cookieBannerInner);
// Append a cookieBannerCopy & cookieBannerActions in a cookieBannerInner
$cookieBannerInner->appendChild($cookieBannerCopy);
$cookieBannerInner->appendChild($cookieBannerActions);
// Append a cookieBannerHeader & cookieBannerDescription in a cookieBannerCopy
$cookieBannerCopy->appendChild($cookieBannerHeader);
$cookieBannerCopy->appendChild($cookieBannerDescription);
// Append a cookieBannerCta in a cookieBannerActions
$cookieBannerActions->appendChild($cookieBannerCta);
// Append the whole bunch.
$dom->appendChild($cookieBanner);
// Parse the HTML.
echo $dom->saveHTML($cookieBanner);
}
}
}
| 113d3e755297f3f620dff60dccdb0de4668fd8f0 | [
"Markdown",
"PHP"
] | 2 | Markdown | mahmoudahmedd/programming-task-plg | 35b987a5b1da651a76cb3a5d829f28759686e72d | cea05a1529c0e0ddfe98943887786862be20a0bf | |
refs/heads/master | <file_sep># Copyright 2019 DeepMind Technologies Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""NFSP agents trained on Kuhn Poker."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import app
from absl import flags
from absl import logging
import tensorflow.compat.v1 as tf
from open_spiel.python import policy
from open_spiel.python import rl_environment
from open_spiel.python.algorithms import exploitability
from open_spiel.python.algorithms import nfsp
import glob
import os
import numpy as np
from open_spiel.python.algorithms import random_agent
FLAGS = flags.FLAGS
flags.DEFINE_integer("num_train_episodes", int(3e6),
"Number of training episodes.")
flags.DEFINE_integer("eval_every", 100,
"Episode frequency at which the agents are evaluated.")
flags.DEFINE_list("hidden_layers_sizes", [
128,
], "Number of hidden units in the avg-net and Q-net.")
flags.DEFINE_integer("replay_buffer_capacity", int(2e5),
"Size of the replay buffer.")
flags.DEFINE_integer("reservoir_buffer_capacity", int(2e6),
"Size of the reservoir buffer.")
flags.DEFINE_float("anticipatory_param_agent0", 0,
"Prob of using the rl best response as episode policy for agent0.")
flags.DEFINE_float("anticipatory_param_agent1", 0,
"Prob of using the rl best response as episode policy for agent1.")
flags.DEFINE_string("experiment_name", "kuhn_poker_0.1_7_27", "Experiment name")
flags.DEFINE_string("load_path", "/home/jxu8/Code_update/open_spiel/sessions_nfsp/", "Path to load the session")
flags.DEFINE_string("save_path", "/home/jxu8/Code_update/open_spiel/evaluation_data/eval_kp_nfsp_0.1_7_27/", "Path to load the session")
class NFSPPolicies(policy.Policy):
"""Joint policy to be evaluated."""
def __init__(self, env, nfsp_policies, mode):
game = env.game
player_ids = [0, 1]
super(NFSPPolicies, self).__init__(game, player_ids)
self._policies = nfsp_policies
self._mode = mode
self._obs = {"info_state": [None, None], "legal_actions": [None, None]}
def action_probabilities(self, state, player_id=None):
cur_player = state.current_player()
legal_actions = state.legal_actions(cur_player)
self._obs["current_player"] = cur_player
self._obs["info_state"][cur_player] = (
state.information_state_tensor(cur_player))
self._obs["legal_actions"][cur_player] = legal_actions
info_state = rl_environment.TimeStep(
observations=self._obs, rewards=None, discounts=None, step_type=None)
with self._policies[cur_player].temp_mode_as(self._mode):
p = self._policies[cur_player].step(info_state, is_evaluation=True).probs
prob_dict = {action: p[action] for action in legal_actions}
return prob_dict
def eval_against_random_agent1(env, trained_agents, random_agents, num_episodes):
"""Evaluates `trained_agents` against `random_agents` for `num_episodes`."""
wins = np.zeros(3)
cur_agents = [trained_agents[0], random_agents[1]]
for _ in range(num_episodes):
time_step = env.reset()
while not time_step.last():
player_id = time_step.observations["current_player"]
agent_output = cur_agents[player_id].step(time_step, is_evaluation=True)
time_step = env.step([agent_output.action])
if time_step.rewards[0] > 0:
wins[0] += 1
elif time_step.rewards[1] > 0:
wins[1] += 1
else:
wins[2] += 1
return wins / num_episodes
def eval_against_random_agent0(env, trained_agents, random_agents, num_episodes):
"""Evaluates `trained_agents` against `random_agents` for `num_episodes`."""
wins = np.zeros(3)
cur_agents = [random_agents[0], trained_agents[1]]
for _ in range(num_episodes):
time_step = env.reset()
while not time_step.last():
player_id = time_step.observations["current_player"]
agent_output = cur_agents[player_id].step(time_step, is_evaluation=True)
time_step = env.step([agent_output.action])
if time_step.rewards[0] > 0:
wins[0] += 1
elif time_step.rewards[1] > 0:
wins[1] += 1
else:
wins[2] += 1
return wins / num_episodes
def eval_against_trained_agents(env, trained_agents, num_episodes):
wins = np.zeros(3)
for _ in range(num_episodes):
time_step = env.reset()
while not time_step.last():
player_id = time_step.observations["current_player"]
if env.is_turn_based:
agents_output = trained_agents[player_id].step(time_step, is_evaluation=True)
action_list = [agents_output.action]
else:
agents_output = [agent.step(time_step, is_evaluation=True) for agent in trained_agents]
action_list = [agent_output.action for agent_output in agents_output]
time_step = env.step(action_list)
if time_step.rewards[0] > 0:
wins[0] += 1
elif time_step.rewards[1] > 0:
wins[1] += 1
else:
wins[2] += 1
return wins / num_episodes
def eval_between_random_agents(env, random_agents, num_episodes):
wins = np.zeros(3)
rewards = np.zeros(2)
for _ in range(num_episodes):
time_step = env.reset()
while not time_step.last():
player_id = time_step.observations["current_player"]
if env.is_turn_based:
agents_output = random_agents[player_id].step(time_step, is_evaluation=True)
action_list = [agents_output.action]
else:
agents_output = [agent.step(time_step, is_evaluation=True) for agent in random_agents]
action_list = [agent_output.action for agent_output in agents_output]
time_step = env.step(action_list)
rewards[0] += time_step.rewards[0]
rewards[1] += time_step.rewards[1]
if time_step.rewards[0] > 0:
wins[0] += 1
elif time_step.rewards[1] > 0:
wins[1] += 1
else:
wins[2] += 1
return wins / num_episodes, rewards / num_episodes
def main(unused_argv):
game = "kuhn_poker"
num_players = 2
load_path = FLAGS.load_path + FLAGS.experiment_name
env_configs = {"players": num_players}
env = rl_environment.Environment(game, **env_configs)
info_state_size = env.observation_spec()["info_state"][0]
num_actions = env.action_spec()["num_actions"]
hidden_layers_sizes = [int(l) for l in FLAGS.hidden_layers_sizes]
kwargs = {
"replay_buffer_capacity": FLAGS.replay_buffer_capacity,
"epsilon_decay_duration": FLAGS.num_train_episodes,
"epsilon_start": 0.06,
"epsilon_end": 0.001,
}
random_agents = [
random_agent.RandomAgent(player_id=idx, num_actions=num_actions)
for idx in range(num_players)
]
model_dirs = sorted(glob.glob(load_path + "/episode-*"), key=lambda x: int(os.path.split(x)[1][8:]))
for counter, dir in enumerate(model_dirs, 1):
if counter % 1 == 0:
tf.reset_default_graph()
with tf.Session() as sess:
# pylint: disable=g-complex-comprehension
agent0 = nfsp.NFSP(sess, 0, info_state_size, num_actions, hidden_layers_sizes,
FLAGS.reservoir_buffer_capacity, FLAGS.anticipatory_param_agent0,
**kwargs)
agent1 = nfsp.NFSP(sess, 1, info_state_size, num_actions, hidden_layers_sizes,
FLAGS.reservoir_buffer_capacity, FLAGS.anticipatory_param_agent1,
**kwargs)
saver = tf.train.Saver()
saver.restore(sess, dir + "/trained_model-10000")
#expl_policies_avg = NFSPPolicies(env, [agent0, agent1], nfsp.MODE.average_policy)
#expl_list, expl = exploitability.exploitability(env.game, expl_policies_avg)
# f3 = open(FLAGS.save_path + "exploitability_list.txt", "a")
# f3.write(str(expl_list[0]) + ' ' + str(expl_list[1]) + '\n')
# f4 = open(FLAGS.save_path + "exploitability_avg.txt", "a")
# f4.write(str(expl) + '\n')
# logging.info("Episode: %s, Exploitability AVG %s", counter*10000, expl)
# logging.info("_____________________________________________")
# win_rates_against_random_agent1 = eval_against_random_agent1(env, [agent0, agent1], random_agents, 1000)
# win_rates_against_random_agent0 = eval_against_random_agent0(env, [agent0, agent1], random_agents, 1000)
# win_rates_against_trained_agents = eval_against_trained_agents(env, [agent0, agent1], 1000)
win_rates_between_random_agents, avg_utility_between_random_agents = eval_between_random_agents(env, random_agents, 1000)
f1 = open(FLAGS.save_path + "win_rates/eta_0/win_rates_between_random_agents.txt", "a")
f1.write(str(win_rates_between_random_agents[0]) + ' ' + str(win_rates_between_random_agents[1]) + ' ' + str(win_rates_between_random_agents[2]) + '\n')
f2 = open(FLAGS.save_path + "avg_utility/eta_0/avg_utility_between_random_agents.txt", "a")
f2.write(str(avg_utility_between_random_agents[0]) + ' ' + str(avg_utility_between_random_agents[1]) + '\n')
logging.info("Episode: %s", counter*10000)
logging.info("Trained_agent0 vs Random_agent1: %s", avg_utility_between_random_agents)
# f1 = open(FLAGS.save_path + "win_rates_against_random_agent1.txt", "a")
# f1.write(str(win_rates_against_random_agent1[0]) + ' ' + str(win_rates_against_random_agent1[1]) + ' ' + str(win_rates_against_random_agent1[2]) + '\n')
# f2 = open(FLAGS.save_path + "win_rates_against_random_agent0.txt", "a")
# f2.write(str(win_rates_against_random_agent0[0]) + ' ' + str(win_rates_against_random_agent0[1]) + ' ' + str(win_rates_against_random_agent0[2]) + '\n')
# f3 = open(FLAGS.save_path + "win_rates_against_eachother.txt", "a")
# f3.write(str(win_rates_against_trained_agents[0]) + ' ' + str(win_rates_against_trained_agents[1]) + ' ' + str(win_rates_against_trained_agents[2]) + '\n')
# logging.info("Episode: %s", counter*10000)
# logging.info("Trained_agent0 vs Random_agent1: %s", win_rates_against_random_agent1)
# logging.info("Random_agent0 vs Trained_agent1: %s", win_rates_against_random_agent0)
# logging.info("Trained_agent0 vs Trained_agent1 %s", win_rates_against_trained_agents)
# logging.info("_____________________________________________")
if __name__ == "__main__":
app.run(main)
<file_sep>from absl import app
from absl import flags
import numpy as np
from matplotlib.legend_handler import HandlerLine2D
import matplotlib.pyplot as plt
def read_wr(txt_name):
text_file = open(txt_name, "r")
lines = text_file.read().split("\n")
list1 = []
list2 = []
list3 = []
for line in lines[:-1]:
[str1, str2, str3] = line.split(" ")
list1.append(float(str1))
list2.append(float(str2))
list3.append(float(str3))
return list1, list2, list3
def read_exploitability(txt_name):
txt_file = open(txt_name, 'r')
lines = txt_file.read().split('\n')
num_list = []
for str in lines[:-1]:
if str == "NaN":
num_list.append(1)
else:
num_list.append(float(str))
return num_list
def read_loss(txt_name):
txt_file = open(txt_name)
lines = txt_file.read().split('\n')
list1 = []
list2 = []
for line in lines[:-1]:
[str1, str2] = line.split(' ')
if str1 != 'None':
list1.append(float(str1))
else:
list1.append(str1)
if str2 != 'None':
list2.append(float(str2))
else:
list2.append(str2)
for idx, number in enumerate(list1):
if number == 'None':
list1[idx] = list1[idx+1]
    for idx, number in enumerate(list2):
if number == 'None':
list2[idx] = list2[idx+1]
return list1, list2
def read_behavior_probs(txt_name):
text_file = open(txt_name, "r")
lines = text_file.read().split("\n")
list1 = []
list2 = []
list3 = []
list4 = []
list5 = []
list6 = []
list7 = []
list8 = []
for line in lines[:-1]:
[str1, str2, str3, str4, str5, str6, str7, str8] = line.split(" ")
list1.append(float(str1))
list2.append(float(str2))
list3.append(float(str3))
list4.append(float(str4))
list5.append(float(str5))
list6.append(float(str6))
list7.append(float(str7))
list8.append(float(str8))
return list1, list2, list3, list4, list5, list6, list7, list8
def au_mean(values, start):
    return sum(values[start:]) / len(values[start:])
def main(argv):
kuhn_poker_nfsp_0 = "/home/jxu8/Code_update/open_spiel/evaluation_data/eval_kp_nfsp_0.1_7_27/"
kuhn_poker_nfsp_1 = "/home/jxu8/Code_update/open_spiel/evaluation_data/eval_kp_nfsp_1_7_28/"
ttt_nfsp_0 = "/home/jxu8/Code_update/open_spiel/evaluation_data/eval_ttt_nfsp_0.1_7_26/"
ttt_nfsp_1 = "/home/jxu8/Code_update/open_spiel/evaluation_data/eval_ttt_nfsp_1_7_29/"
kuhn_poker_psro = "/home/jxu8/Code/open_spiel/evaluation_data/eval_kuhn_poker_psro_7_2/"
avg_rewards_0_against_eachother = []
avg_rewards_1_against_eachother = []
avg_rewards_0_against_random_agent1 = []
avg_rewards_0_against_random_agent0 = []
avg_rewards_1_against_random_agent1 = []
avg_rewards_1_against_random_agent0 = []
avg_rewards_0_between_random_agents = []
au_0_against_eachother_mean = []
au_1_against_eachother_mean = []
au_0_against_random_agent1_mean = []
au_0_against_random_agent0_mean = []
au_1_against_random_agent1_mean = []
au_1_against_random_agent0_mean = []
# (trained agetn0 use best response policy only and trained agent 1 use average policy only)
# load avg_utility against eachother with eta 0 in training
avg_rewards_0_against_eachother.append(read_loss(kuhn_poker_nfsp_0 + 'avg_utility_5000/eta_0/avg_utility_against_eachother.txt'))
avg_rewards_0_against_eachother.append(read_loss(kuhn_poker_nfsp_0 + 'avg_utility/eta_1/avg_utility_against_eachother.txt'))
avg_rewards_0_against_eachother.append(read_loss(kuhn_poker_nfsp_0 + 'avg_utility/eta_0.1/avg_utility_against_eachother.txt'))
avg_rewards_0_against_eachother.append(read_loss(kuhn_poker_nfsp_0 + 'avg_utility/eta_0_1/avg_utility_against_eachother.txt'))
avg_rewards_0_against_eachother.append(read_loss(kuhn_poker_nfsp_0 + 'avg_utility/eta_1_0/avg_utility_against_eachother.txt'))
au_0_against_eachother_mean.append([au_mean(avg_rewards_0_against_eachother[0][0], 200), au_mean(avg_rewards_0_against_eachother[0][1], 200)])
avg_rewards_0_against_eachother.append(read_loss(ttt_nfsp_0 + 'avg_utility/eta_0/avg_utility_against_eachother.txt'))
avg_rewards_0_against_eachother.append(read_loss(ttt_nfsp_0 + 'avg_utility/eta_1/avg_utility_against_eachother.txt'))
avg_rewards_0_against_eachother.append(read_loss(ttt_nfsp_0 + 'avg_utility/eta_0.1/avg_utility_against_eachother.txt'))
avg_rewards_0_against_eachother.append(read_loss(ttt_nfsp_0 + 'avg_utility/eta_0_1/avg_utility_against_eachother.txt'))
avg_rewards_0_against_eachother.append(read_loss(ttt_nfsp_0 + 'avg_utility/eta_1_0/avg_utility_against_eachother.txt'))
# load avg_utility against eachother with eta 1 in training
avg_rewards_1_against_eachother.append(read_loss(kuhn_poker_nfsp_1 + 'avg_utility_5000/eta_0/avg_utility_against_eachother.txt'))
avg_rewards_1_against_eachother.append(read_loss(kuhn_poker_nfsp_1 + 'avg_utility/eta_1/avg_utility_against_eachother.txt'))
avg_rewards_1_against_eachother.append(read_loss(kuhn_poker_nfsp_1 + 'avg_utility/eta_0.1/avg_utility_against_eachother.txt'))
avg_rewards_1_against_eachother.append(read_loss(kuhn_poker_nfsp_1 + 'avg_utility/eta_0_1/avg_utility_against_eachother.txt'))
avg_rewards_1_against_eachother.append(read_loss(kuhn_poker_nfsp_1 + 'avg_utility/eta_1_0/avg_utility_against_eachother.txt'))
au_1_against_eachother_mean.append([au_mean(avg_rewards_1_against_eachother[0][0], 200), au_mean(avg_rewards_1_against_eachother[0][1], 200)])
avg_rewards_1_against_eachother.append(read_loss(ttt_nfsp_1 + 'avg_utility/eta_0/avg_utility_against_eachother.txt'))
avg_rewards_1_against_eachother.append(read_loss(ttt_nfsp_1 + 'avg_utility/eta_1/avg_utility_against_eachother.txt'))
avg_rewards_1_against_eachother.append(read_loss(ttt_nfsp_1 + 'avg_utility/eta_0.1/avg_utility_against_eachother.txt'))
avg_rewards_1_against_eachother.append(read_loss(ttt_nfsp_1 + 'avg_utility/eta_0_1/avg_utility_against_eachother.txt'))
avg_rewards_1_against_eachother.append(read_loss(ttt_nfsp_1 + 'avg_utility/eta_1_0/avg_utility_against_eachother.txt'))
avg_rewards_0_against_random_agent1.append(read_loss(kuhn_poker_nfsp_0 + 'avg_utility_5000/eta_0/avg_utility_against_random_agent1.txt'))
avg_rewards_0_against_random_agent0.append(read_loss(kuhn_poker_nfsp_0 + 'avg_utility_5000/eta_0/avg_utility_against_random_agent0.txt'))
avg_rewards_1_against_random_agent1.append(read_loss(kuhn_poker_nfsp_1 + 'avg_utility_5000/eta_0/avg_utility_against_random_agent1.txt'))
avg_rewards_1_against_random_agent0.append(read_loss(kuhn_poker_nfsp_1 + 'avg_utility_5000/eta_0/avg_utility_against_random_agent0.txt'))
au_0_against_random_agent1_mean.append([au_mean(avg_rewards_0_against_random_agent1[0][0], 200), au_mean(avg_rewards_0_against_random_agent1[0][1], 200)])
au_0_against_random_agent0_mean.append([au_mean(avg_rewards_0_against_random_agent0[0][0], 200), au_mean(avg_rewards_0_against_random_agent0[0][1], 200)])
au_1_against_random_agent1_mean.append([au_mean(avg_rewards_1_against_random_agent1[0][0], 200), au_mean(avg_rewards_1_against_random_agent1[0][1], 200)])
au_1_against_random_agent0_mean.append([au_mean(avg_rewards_1_against_random_agent0[0][0], 200), au_mean(avg_rewards_1_against_random_agent0[0][1], 200)])
avg_rewards_0_between_random_agents.append(read_loss(kuhn_poker_nfsp_0 + 'avg_utility/eta_0/avg_utility_between_random_agents.txt'))
# plt avg utility in kuhn_poker_nfsp_0.1_7_27
plt.figure(figsize=(15, 10))
ax2 = plt.subplot(211)
y_ticks = np.arange(-0.20, 0.25, 0.05)
ax2.set_title("average utility with eta 0 in evaluation (0.1 in training)")
line1, = plt.plot(avg_rewards_0_against_eachother[0][0], "b", label="agent0, {}".format(au_0_against_eachother_mean[0][0]))
line2, = plt.plot(avg_rewards_0_against_eachother[0][1], "r", label="agent1, {}".format(au_0_against_eachother_mean[0][1]))
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2], loc='upper right')
plt.ylabel('avg utility')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(212)
y_ticks = np.arange(-0.20, 0.25, 0.05)
ax2.set_title("average utility with eta 0 in evaluation (1 in training)")
line1, = plt.plot(avg_rewards_1_against_eachother[0][0], "b", label="agent0, {}".format(au_1_against_eachother_mean[0][0]))
line2, = plt.plot(avg_rewards_1_against_eachother[0][1], "r", label="agent1, {}".format(au_1_against_eachother_mean[0][1]))
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2], loc='upper right')
plt.ylabel('avg utility')
plt.xlabel('episode(*1e4)')
plt.show()
# ax2 = plt.subplot(512)
# ax2.set_title("average utility with eta 1 in evaluation (0.1 in training)")
# line1, = plt.plot(avg_rewards_0_against_eachother[1][0], "b", label="agent0")
# line2, = plt.plot(avg_rewards_0_against_eachother[1][1], "r", label="agent1")
# plt.legend(handles=[line1, line2], loc='upper right')
# plt.ylabel('avg utility')
# plt.xlabel('episode(*1e4)')
#
# ax2 = plt.subplot(513)
# ax2.set_title("average utility with eta 0.1 in evaluation (0.1 in training)")
# line1, = plt.plot(avg_rewards_0_against_eachother[2][0], "b", label="agent0")
# line2, = plt.plot(avg_rewards_0_against_eachother[2][1], "r", label="agent1")
# plt.legend(handles=[line1, line2], loc='upper right')
# plt.ylabel('avg utility')
# plt.xlabel('episode(*1e4)')
#
# ax2 = plt.subplot(514)
# ax2.set_title("average utility with eta 0_1 in evaluation (0.1 in training)")
# line1, = plt.plot(avg_rewards_0_against_eachother[3][0], "b", label="agent0")
# line2, = plt.plot(avg_rewards_0_against_eachother[3][1], "r", label="agent1")
# plt.legend(handles=[line1, line2], loc='upper right')
# plt.ylabel('avg utility')
# plt.xlabel('episode(*1e4)')
#
# ax2 = plt.subplot(515)
# ax2.set_title("average utility with eta 1_0 in evaluation (0.1 in training)")
# line1, = plt.plot(avg_rewards_0_against_eachother[4][0], "b", label="agent0")
# line2, = plt.plot(avg_rewards_0_against_eachother[4][1], "r", label="agent1")
# plt.legend(handles=[line1, line2], loc='upper right')
# plt.ylabel('avg utility')
# plt.xlabel('episode(*1e4)')
# plt.show()
# plt avg utility in kuhn_poker_nfsp_1_7_28
# plt.figure(figsize=(15, 20))
# ax2 = plt.subplot(511)
# y_ticks = np.arange(-0.20, 0.25, 0.05)
# ax2.set_title("average utility with eta 0 in evaluation (1 in training)")
# line1, = plt.plot(avg_rewards_1_against_eachother[0][0], "b", label="agent0, {}".format(au_1_against_eachother_mean[0][0]))
# line2, = plt.plot(avg_rewards_1_against_eachother[0][1], "r", label="agent1, {}".format(au_1_against_eachother_mean[0][1]))
# plt.yticks(y_ticks)
# plt.legend(handles=[line1, line2], loc='upper right')
# plt.ylabel('avg utility')
# plt.xlabel('episode(*1e4)')
#
# ax2 = plt.subplot(512)
# ax2.set_title("average utility with eta 1 in evaluation (1 in training)")
# line1, = plt.plot(avg_rewards_1_against_eachother[1][0], "b", label="agent0")
# line2, = plt.plot(avg_rewards_1_against_eachother[1][1], "r", label="agent1")
# plt.legend(handles=[line1, line2], loc='upper right')
# plt.ylabel('avg utility')
# plt.xlabel('episode(*1e4)')
#
# ax2 = plt.subplot(513)
# ax2.set_title("average utility with eta 0.1 in evaluation (1 in training)")
# line1, = plt.plot(avg_rewards_1_against_eachother[2][0], "b", label="agent0")
# line2, = plt.plot(avg_rewards_1_against_eachother[2][1], "r", label="agent1")
# plt.legend(handles=[line1, line2], loc='upper right')
# plt.ylabel('avg utility')
# plt.xlabel('episode(*1e4)')
#
# ax2 = plt.subplot(514)
# ax2.set_title("average utility with eta 0_1 in evaluation (1 in training)")
# line1, = plt.plot(avg_rewards_1_against_eachother[3][0], "b", label="agent0")
# line2, = plt.plot(avg_rewards_1_against_eachother[3][1], "r", label="agent1")
# plt.legend(handles=[line1, line2], loc='upper right')
# plt.ylabel('avg utility')
# plt.xlabel('episode(*1e4)')
#
# ax2 = plt.subplot(515)
# ax2.set_title("average utility with eta 1_0 in evaluation (1 in training)")
# line1, = plt.plot(avg_rewards_1_against_eachother[4][0], "b", label="agent0")
# line2, = plt.plot(avg_rewards_1_against_eachother[4][1], "r", label="agent1")
# plt.legend(handles=[line1, line2], loc='upper right')
# plt.ylabel('avg utility')
# plt.xlabel('episode(*1e4)')
# plt.show()
plt.figure(figsize=(15, 10))
ax2 = plt.subplot(211)
y_ticks = np.arange(-0.20, 0.25, 0.05)
ax2.set_title("average utility(trained agent0 vs random agent1) with eta 0 in evaluation (0.1 in training)")
line1, = plt.plot(avg_rewards_0_against_random_agent1[0][0], "b", label="trained_agent0, {}".format(au_0_against_random_agent1_mean[0][0]))
line2, = plt.plot(avg_rewards_0_against_random_agent1[0][1], "r", label="random_agent1, {}".format(au_0_against_random_agent1_mean[0][1]))
#plt.yticks(y_ticks)
plt.legend(handles=[line1, line2], loc='upper right')
plt.ylabel('avg utility')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(212)
y_ticks = np.arange(-0.20, 0.25, 0.05)
ax2.set_title("average utility(random agent0 vs trained agent1) with eta 0 in evaluation (0.1 in training)")
line1, = plt.plot(avg_rewards_0_against_random_agent0[0][0], "r", label="random_agent0, {}".format(au_0_against_random_agent0_mean[0][0]))
line2, = plt.plot(avg_rewards_0_against_random_agent0[0][1], "b", label="trained_agent1, {}".format(au_0_against_random_agent0_mean[0][1]))
#plt.yticks(y_ticks)
plt.legend(handles=[line1, line2], loc='upper right')
plt.ylabel('avg utility')
plt.xlabel('episode(*1e4)')
plt.show()
plt.figure(figsize=(15, 10))
ax2 = plt.subplot(211)
y_ticks = np.arange(-0.20, 0.25, 0.05)
ax2.set_title("average utility(trained agent0 vs random agent1) with eta 0 in evaluation (1 in training)")
line1, = plt.plot(avg_rewards_1_against_random_agent1[0][0], "b", label="trained_agent0, {}".format(au_1_against_random_agent1_mean[0][0]))
line2, = plt.plot(avg_rewards_1_against_random_agent1[0][1], "r", label="random_agent1, {}".format(au_1_against_random_agent1_mean[0][1]))
#plt.yticks(y_ticks)
plt.legend(handles=[line1, line2], loc='upper right')
plt.ylabel('avg utility')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(212)
y_ticks = np.arange(-0.20, 0.25, 0.05)
ax2.set_title("average utility(random agent0 vs trained agent1) with eta 0 in evaluation (1 in training)")
line1, = plt.plot(avg_rewards_1_against_random_agent0[0][0], "r", label="random agent0, {}".format(au_1_against_random_agent0_mean[0][0]))
line2, = plt.plot(avg_rewards_1_against_random_agent0[0][1], "b", label="trained_agent1, {}".format(au_1_against_random_agent0_mean[0][1]))
#plt.yticks(y_ticks)
plt.legend(handles=[line1, line2], loc='upper right')
plt.ylabel('avg utility')
plt.xlabel('episode(*1e4)')
plt.show()
plt.figure(figsize=(15, 5))
y_ticks = np.arange(-0.20, 0.25, 0.05)
ax2.set_title("average utility(trained agent0 vs random agent1) with eta 0 in evaluation (1 in training)")
line1, = plt.plot(avg_rewards_0_between_random_agents[0][0], "b", label="random_agent0, {}".format(au_mean(avg_rewards_0_between_random_agents[0][0], 200)))
line2, = plt.plot(avg_rewards_0_between_random_agents[0][1], "r", label="random_agent1, {}".format(au_mean(avg_rewards_0_between_random_agents[0][1], 200)))
#plt.yticks(y_ticks)
plt.legend(handles=[line1, line2], loc='upper right')
plt.ylabel('avg utility')
plt.xlabel('episode(*1e4)')
plt.show()
if __name__ == "__main__":
app.run(main)<file_sep># Copyright 2019 DeepMind Technologies Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""NFSP agents trained on Kuhn Poker."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import app
from absl import flags
from absl import logging
import tensorflow.compat.v1 as tf
from open_spiel.python import policy
from open_spiel.python import rl_environment
from open_spiel.python.algorithms import exploitability
from open_spiel.python.algorithms import nfsp
import glob
import os
import numpy as np
from open_spiel.python.algorithms import random_agent
FLAGS = flags.FLAGS
flags.DEFINE_integer("num_train_episodes", int(3e6),
"Number of training episodes.")
flags.DEFINE_integer("eval_every", 100,
"Episode frequency at which the agents are evaluated.")
flags.DEFINE_list("hidden_layers_sizes", [
128,
], "Number of hidden units in the avg-net and Q-net.")
flags.DEFINE_integer("replay_buffer_capacity", int(2e5),
"Size of the replay buffer.")
flags.DEFINE_integer("reservoir_buffer_capacity", int(2e6),
"Size of the reservoir buffer.")
flags.DEFINE_float("anticipatory_param_agent0", 0,
"Prob of using the rl best response as episode policy for agent0.")
flags.DEFINE_float("anticipatory_param_agent1", 0,
"Prob of using the rl best response as episode policy for agent1.")
flags.DEFINE_string("experiment_name", "tic_tac_toe_0.1_7_26", "Experiment name")
flags.DEFINE_string("load_path", "/home/jxu8/Code_update/open_spiel/sessions_nfsp/", "Path to load the session")
flags.DEFINE_string("save_path", "/home/jxu8/Code_update/open_spiel/evaluation_data/eval_ttt_nfsp_0.1_7_26/", "Path to load the session")
class NFSPPolicies(policy.Policy):
"""Joint policy to be evaluated."""
def __init__(self, env, nfsp_policies, mode):
game = env.game
player_ids = [0, 1]
super(NFSPPolicies, self).__init__(game, player_ids)
self._policies = nfsp_policies
self._mode = mode
self._obs = {"info_state": [None, None], "legal_actions": [None, None]}
def action_probabilities(self, state, player_id=None):
cur_player = state.current_player()
legal_actions = state.legal_actions(cur_player)
self._obs["current_player"] = cur_player
self._obs["info_state"][cur_player] = (
state.information_state_tensor(cur_player))
self._obs["legal_actions"][cur_player] = legal_actions
info_state = rl_environment.TimeStep(
observations=self._obs, rewards=None, discounts=None, step_type=None)
with self._policies[cur_player].temp_mode_as(self._mode):
p = self._policies[cur_player].step(info_state, is_evaluation=True).probs
prob_dict = {action: p[action] for action in legal_actions}
return prob_dict
def eval_against_random_agent1(env, trained_agents, random_agents, num_episodes):
"""Evaluates `trained_agents` against `random_agents` for `num_episodes`."""
wins = np.zeros(3)
rewards = np.zeros(2)
cur_agents = [trained_agents[0], random_agents[1]]
for _ in range(num_episodes):
time_step = env.reset()
while not time_step.last():
player_id = time_step.observations["current_player"]
agent_output = cur_agents[player_id].step(time_step, is_evaluation=True)
time_step = env.step([agent_output.action])
rewards[0] += time_step.rewards[0]
rewards[1] += time_step.rewards[1]
if time_step.rewards[0] > 0:
wins[0] += 1
elif time_step.rewards[1] > 0:
wins[1] += 1
else:
wins[2] += 1
return wins / num_episodes, rewards / num_episodes
def eval_against_random_agent0(env, trained_agents, random_agents, num_episodes):
"""Evaluates `trained_agents` against `random_agents` for `num_episodes`."""
wins = np.zeros(3)
rewards = np.zeros(2)
cur_agents = [random_agents[0], trained_agents[1]]
for _ in range(num_episodes):
time_step = env.reset()
while not time_step.last():
player_id = time_step.observations["current_player"]
agent_output = cur_agents[player_id].step(time_step, is_evaluation=True)
time_step = env.step([agent_output.action])
rewards[0] += time_step.rewards[0]
rewards[1] += time_step.rewards[1]
if time_step.rewards[0] > 0:
wins[0] += 1
elif time_step.rewards[1] > 0:
wins[1] += 1
else:
wins[2] += 1
return wins / num_episodes, rewards / num_episodes
def eval_against_trained_agents(env, trained_agents, num_episodes):
wins = np.zeros(3)
rewards = np.zeros(2)
for _ in range(num_episodes):
time_step = env.reset()
while not time_step.last():
player_id = time_step.observations["current_player"]
if env.is_turn_based:
agents_output = trained_agents[player_id].step(time_step, is_evaluation=True)
action_list = [agents_output.action]
else:
agents_output = [agent.step(time_step, is_evaluation=True) for agent in trained_agents]
action_list = [agent_output.action for agent_output in agents_output]
time_step = env.step(action_list)
rewards[0] += time_step.rewards[0]
rewards[1] += time_step.rewards[1]
if time_step.rewards[0] > 0:
wins[0] += 1
elif time_step.rewards[1] > 0:
wins[1] += 1
else:
wins[2] += 1
return wins / num_episodes, rewards / num_episodes
def action_probability(env, trained_agents, num_episodes, outcome0, outcome1):
  """Play num_episodes games with the cards fixed to (outcome0, outcome1) and tally how often each action is taken at every Kuhn poker decision point."""
#state_list = []
acti_list = []
prob_list = []
rewards_list = []
tmp_act = []
tmp_probs = []
agent0_1 = np.zeros(2)
agent1_1 = np.zeros(4)
agent0_2 = np.zeros(2)
agent0_1_probs = np.zeros(2)
agent1_1_probs = np.zeros(4)
agent0_2_probs = np.zeros(2)
for _ in range(num_episodes):
time_step = env.reset_jx(outcome0, outcome1)
#state_list.append(outcome_list)
while not time_step.last():
player_id = time_step.observations["current_player"]
if env.is_turn_based:
agents_output = trained_agents[player_id].step(
time_step, is_evaluation=True)
action_list = [agents_output.action]
else:
agents_output = [
agent.step(time_step, is_evaluation=True) for agent in trained_agents
]
action_list = [agent_output.action for agent_output in agents_output]
tmp_act.append(agents_output.action)
tmp_probs.append(agents_output.probs)
time_step = env.step(action_list)
rewards = time_step.rewards
rewards_list.append(rewards)
acti_list.append(tmp_act)
prob_list.append(tmp_probs)
tmp_act = []
tmp_probs = []
for action in acti_list:
if action[0] == 0:
agent0_1[0] += 1
if action[1] == 0:
agent1_1[0] += 1
else:
if action[2] == 0:
agent0_2[0] += 1
else:
if action[1] == 0:
agent1_1[2] += 1
agent0_1[1] = num_episodes - agent0_1[0]
agent1_1[3] = agent0_1[1] - agent1_1[2]
agent1_1[1] = agent0_1[0] - agent1_1[0]
agent0_2[1] = agent1_1[1] - agent0_2[0]
agent0_1_probs[0] = agent0_1[0] / num_episodes
agent0_1_probs[1] = agent0_1[1] / num_episodes
agent1_1_probs[agent1_1 != 0] = agent1_1[agent1_1 != 0] / np.array([agent0_1[0], agent0_1[0], agent0_1[1], agent0_1[1]])[agent1_1 != 0]
agent0_2_probs[agent0_2 != 0] = agent0_2[agent0_2 != 0] / agent1_1[1]
return acti_list, prob_list, rewards_list, agent0_1_probs, agent1_1_probs[:2], agent1_1_probs[2:], agent0_2_probs
def policy_probs(env, trained_agents, outcome0, outcome1):
time_step = env.reset_jx(outcome0, outcome1)
agents_output_0 = trained_agents[0].step(
time_step, is_evaluation=True)
print(env.get_state)
time_step = env.reset_jx(outcome0, outcome1)
time_step = env.step([0])
agents_output_1 = trained_agents[1].step(
time_step, is_evaluation=True)
time_step = env.reset_jx(outcome0, outcome1)
time_step = env.step([1])
agents_output_2 = trained_agents[1].step(
time_step, is_evaluation=True)
time_step = env.reset_jx(outcome0, outcome1)
time_step = env.step([0])
time_step = env.step([1])
agents_output_3 = trained_agents[0].step(
time_step, is_evaluation=True)
return agents_output_0.probs, agents_output_1.probs, agents_output_2.probs, agents_output_3.probs
def main(unused_argv):
game = "kuhn_poker"
num_players = 2
load_path = FLAGS.load_path + FLAGS.experiment_name
env_configs = {"players": num_players}
env = rl_environment.Environment(game, **env_configs)
info_state_size = env.observation_spec()["info_state"][0]
num_actions = env.action_spec()["num_actions"]
hidden_layers_sizes = [int(l) for l in FLAGS.hidden_layers_sizes]
kwargs = {
"replay_buffer_capacity": FLAGS.replay_buffer_capacity,
"epsilon_decay_duration": FLAGS.num_train_episodes,
"epsilon_start": 0.06,
"epsilon_end": 0.001,
}
random_agents = [
random_agent.RandomAgent(player_id=idx, num_actions=num_actions)
for idx in range(num_players)
]
model_dirs = sorted(glob.glob(load_path + "/episode-*"), key=os.path.getmtime)
for counter, dir in enumerate(model_dirs, 1):
if counter % 1 == 0:
tf.reset_default_graph()
with tf.Session() as sess:
# pylint: disable=g-complex-comprehension
agent0 = nfsp.NFSP(sess, 0, info_state_size, num_actions, hidden_layers_sizes,
FLAGS.reservoir_buffer_capacity, FLAGS.anticipatory_param_agent0,
**kwargs)
agent1 = nfsp.NFSP(sess, 1, info_state_size, num_actions, hidden_layers_sizes,
FLAGS.reservoir_buffer_capacity, FLAGS.anticipatory_param_agent1,
**kwargs)
saver = tf.train.Saver()
saver.restore(sess, dir + "/trained_model-10000")
expl_policies_avg = NFSPPolicies(env, [agent0, agent1], nfsp.MODE.average_policy)
# expl_list, expl = exploitability.exploitability(env.game, expl_policies_avg)
# f4 = open("/home/jxu8/Code_update/open_spiel/evaluation_data/eval_ttt_nfsp_0.1_7_24/eval_eta_0/win_rates_against_random_agent1.txt", "a")
# f4.write(str(expl) + '\n')
#
# logging.info("Episode: %s, Exploitability AVG %s", counter*10000, expl)
# logging.info("_____________________________________________")
win_rates_against_random_agent1, avg_utility_against_random_agent1 = eval_against_random_agent1(env, [agent0, agent1], random_agents, 1000)
win_rates_against_random_agent0, avg_utility_against_random_agent0 = eval_against_random_agent0(env, [agent0, agent1], random_agents, 1000)
win_rates_against_trained_agents, avg_utility = eval_against_trained_agents(env, [agent0, agent1], 1000)
behavior_probabilities_0_1 = action_probability(env, [agent0, agent1], 100, 0, 1)
behavior_probabilities_0_2 = action_probability(env, [agent0, agent1], 100, 0, 2)
behavior_probabilities_1_0 = action_probability(env, [agent0, agent1], 100, 1, 0)
behavior_probabilities_1_2 = action_probability(env, [agent0, agent1], 100, 1, 2)
behavior_probabilities_2_0 = action_probability(env, [agent0, agent1], 100, 2, 0)
behavior_probabilities_2_1 = action_probability(env, [agent0, agent1], 100, 2, 1)
        policy_probabilities_0_1 = policy_probs(env, [agent0, agent1], 0, 1)
'''
f1 = open(FLAGS.save_path + "avg_utility_against_random_agent1.txt", "a")
f1.write(str(avg_utility_against_random_agent1[0]) + ' ' + str(avg_utility_against_random_agent1[1]) + '\n')
f2 = open(FLAGS.save_path + "avg_utility_against_random_agent0.txt", "a")
f2.write(str(avg_utility_against_random_agent0[0]) + ' ' + str(avg_utility_against_random_agent0[1]) + '\n')
f3 = open(FLAGS.save_path + "avg_utility_against_eachother.txt", "a")
f3.write(str(avg_utility[0]) + ' ' + str(avg_utility[1]) + '\n')
'''
logging.info("Episode: %s", counter*10000)
logging.info("Trained_agent0 vs Random_agent1: %s", avg_utility_against_random_agent1)
logging.info("Random_agent0 vs Trained_agent1: %s", avg_utility_against_random_agent0)
logging.info("Trained_agent0 vs Trained_agent1 %s", avg_utility)
logging.info("_____________________________________________")
if __name__ == "__main__":
app.run(main)
<file_sep>from absl import app
from absl import flags
import numpy as np
from matplotlib.legend_handler import HandlerLine2D
import matplotlib.pyplot as plt
def read_wr(txt_name):
text_file = open(txt_name, "r")
lines = text_file.read().split("\n")
list1 = []
list2 = []
list3 = []
for line in lines[:-1]:
[str1, str2, str3] = line.split(" ")
list1.append(float(str1))
list2.append(float(str2))
list3.append(float(str3))
return list1, list2, list3
def read_exploitability(txt_name):
txt_file = open(txt_name, 'r')
lines = txt_file.read().split('\n')
num_list = []
for str in lines[:-1]:
if str == "NaN":
num_list.append(1)
else:
num_list.append(float(str))
return num_list
def read_loss(txt_name):
txt_file = open(txt_name)
lines = txt_file.read().split('\n')
list1 = []
list2 = []
for line in lines[:-1]:
[str1, str2] = line.split(' ')
if str1 != 'None':
list1.append(float(str1))
else:
list1.append(str1)
if str2 != 'None':
list2.append(float(str2))
else:
list2.append(str2)
for idx, number in enumerate(list1):
if number == 'None':
list1[idx] = list1[idx+1]
    for idx, number in enumerate(list2):
if number == 'None':
list2[idx] = list2[idx+1]
return list1, list2
def read_behavior_probs(txt_name):
text_file = open(txt_name, "r")
lines = text_file.read().split("\n")
list1 = []
list2 = []
list3 = []
list4 = []
list5 = []
list6 = []
list7 = []
list8 = []
for line in lines[:-1]:
[str1, str2, str3, str4, str5, str6, str7, str8] = line.split(" ")
list1.append(float(str1))
list2.append(float(str2))
list3.append(float(str3))
list4.append(float(str4))
list5.append(float(str5))
list6.append(float(str6))
list7.append(float(str7))
list8.append(float(str8))
return list1, list2, list3, list4, list5, list6, list7, list8
def main(argv):
kuhn_poker_nfsp_0 = "/home/jxu8/Code_update/open_spiel/evaluation_data/eval_kp_nfsp_0.1_7_27/"
kuhn_poker_nfsp_1 = "/home/jxu8/Code_update/open_spiel/evaluation_data/eval_kp_nfsp_1_7_28/"
ttt_nfsp_0 = "/home/jxu8/Code_update/open_spiel/evaluation_data/eval_ttt_nfsp_0.1_7_26/"
ttt_nfsp_1 = "/home/jxu8/Code_update/open_spiel/evaluation_data/eval_ttt_nfsp_1_7_29/"
kuhn_poker_psro = "/home/jxu8/Code/open_spiel/evaluation_data/eval_kuhn_poker_psro_7_2/"
bp_jk_cb = []
bp_jq_cb = []
bp_kj_cb = []
bp_kq_cb = []
bp_qj_cb = []
bp_qk_cb = []
bp_jk_cb.append(read_behavior_probs(kuhn_poker_nfsp_0 + 'behavior_probs/eta_0/competition_based/JK.txt'))
bp_jq_cb.append(read_behavior_probs(kuhn_poker_nfsp_0 + 'behavior_probs/eta_0/competition_based/JQ.txt'))
bp_kj_cb.append(read_behavior_probs(kuhn_poker_nfsp_0 + 'behavior_probs/eta_0/competition_based/KJ.txt'))
bp_kq_cb.append(read_behavior_probs(kuhn_poker_nfsp_0 + 'behavior_probs/eta_0/competition_based/KQ.txt'))
bp_qj_cb.append(read_behavior_probs(kuhn_poker_nfsp_0 + 'behavior_probs/eta_0/competition_based/QJ.txt'))
bp_qk_cb.append(read_behavior_probs(kuhn_poker_nfsp_0 + 'behavior_probs/eta_0/competition_based/QK.txt'))
bp_jk_cb.append(read_behavior_probs(kuhn_poker_nfsp_1 + 'behavior_probs/eta_0/competition_based/JK.txt'))
bp_jq_cb.append(read_behavior_probs(kuhn_poker_nfsp_1 + 'behavior_probs/eta_0/competition_based/JQ.txt'))
bp_kj_cb.append(read_behavior_probs(kuhn_poker_nfsp_1 + 'behavior_probs/eta_0/competition_based/KJ.txt'))
bp_kq_cb.append(read_behavior_probs(kuhn_poker_nfsp_1 + 'behavior_probs/eta_0/competition_based/KQ.txt'))
bp_qj_cb.append(read_behavior_probs(kuhn_poker_nfsp_1 + 'behavior_probs/eta_0/competition_based/QJ.txt'))
bp_qk_cb.append(read_behavior_probs(kuhn_poker_nfsp_1 + 'behavior_probs/eta_0/competition_based/QK.txt'))
#plt alpha in kuhn_poker_nfsp_0.1(eta 0)
tmp_list = [bp_jk_cb[0], bp_jq_cb[0], bp_kj_cb[0], bp_kq_cb[0], bp_qj_cb[0], bp_qk_cb[0]]
alpha_1 = [1 - tmp_list[0][0][i] for i in range(len(tmp_list[0][0]))]
alpha_2 = [1 - tmp_list[1][0][i] for i in range(len(tmp_list[1][0]))]
alpha_3 = [(1/3) * (1 - tmp_list[2][0][i]) for i in range(len(tmp_list[2][0]))]
alpha_4 = [(1/3) * (1 - tmp_list[3][0][i]) for i in range(len(tmp_list[2][0]))]
alpha_5 = [tmp_list[4][7][i] - 1/3 for i in range(len(tmp_list[4][7]))]
alpha_6 = [tmp_list[5][7][i] - 1/3 for i in range(len(tmp_list[5][7]))]
ax2 = plt.figure(figsize=(10, 5))
#ax2.set_title("JK (kuhn_poker_nfsp_0.1, eta0.1 in evaluation)")
#plt.ylim(0, 0.35)
line1, = plt.plot(alpha_1, "b-", label="JK & JQ")
#line2, = plt.plot(alpha_2, "b*", label="2")
line3, = plt.plot(alpha_3, "g-", label="KJ & KQ")
#line4, = plt.plot(alpha_4, "g*", label="4")
line5, = plt.plot(alpha_5, "y-", label="QJ & QK")
#line6, = plt.plot(alpha_6, "y*", label="6")
#plt.legend(handles=[line1, line2, line3, line4, line5, line6], loc='upper right')
plt.legend(handles=[line1, line3, line5], loc='upper right')
plt.ylabel('alpha')
plt.xlabel('episode(*1e4)')
plt.show()
#plt alpha in kuhn_poker_nfsp_1(eta 0)
tmp_list = [bp_jk_cb[1], bp_jq_cb[1], bp_kj_cb[1], bp_kq_cb[1], bp_qj_cb[1], bp_qk_cb[1]]
alpha_1 = [1 - tmp_list[0][0][i] for i in range(len(tmp_list[0][0]))]
alpha_2 = [1 - tmp_list[1][0][i] for i in range(len(tmp_list[1][0]))]
alpha_3 = [(1/3) * (1 - tmp_list[2][0][i]) for i in range(len(tmp_list[2][0]))]
alpha_4 = [(1/3) * (1 - tmp_list[3][0][i]) for i in range(len(tmp_list[2][0]))]
alpha_5 = [tmp_list[4][7][i] - 1/3 for i in range(len(tmp_list[4][7]))]
alpha_6 = [tmp_list[5][7][i] - 1/3 for i in range(len(tmp_list[5][7]))]
ax2 = plt.figure(figsize=(10, 5))
#ax2.set_title("JK (kuhn_poker_nfsp_0.1, eta0.1 in evaluation)")
#plt.ylim(0, 0.35)
line1, = plt.plot(alpha_1, "b-", label="JK & JQ")
#line2, = plt.plot(alpha_2, "b*", label="2")
line3, = plt.plot(alpha_3, "g-", label="KJ & KQ")
#line4, = plt.plot(alpha_4, "g*", label="4")
line5, = plt.plot(alpha_5, "y-", label="QJ & QK")
#line6, = plt.plot(alpha_6, "y*", label="6")
#plt.legend(handles=[line1, line2, line3, line4, line5, line6], loc='upper right')
plt.legend(handles=[line1, line3, line5], loc='upper right')
plt.ylabel('alpha')
plt.xlabel('episode(*1e4)')
plt.show()
plt.figure(figsize=(10, 10))
ax2 = plt.subplot(311)
ax2.set_title("JK (kuhn_poker_nfsp_0.1, eta0 in evaluation)")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_jk_cb[0][0], "b", label="1")
line2, = plt.plot(bp_jk_cb[0][2], "r", label="2")
line3, = plt.plot(bp_jk_cb[0][4], "g", label="3")
line4, = plt.plot(bp_jk_cb[0][6], "y", label="4")
plt.axhline(y=2/3,ls=":",c="blue")
plt.axhline(y=1,ls=":",c="blue")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(312)
ax2.set_title("JQ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_jq_cb[0][0], "b", label="1")
line2, = plt.plot(bp_jq_cb[0][2], "r", label="2")
line3, = plt.plot(bp_jq_cb[0][4], "g", label="3")
line4, = plt.plot(bp_jq_cb[0][6], "y", label="4")
plt.axhline(y=2/3,ls=":",c="blue")
plt.axhline(y=1,ls=":",c="blue")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(313)
ax2.set_title("KJ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_kj_cb[0][0], "b", label="1")
line2, = plt.plot(bp_kj_cb[0][2], "r", label="2")
line3, = plt.plot(bp_kj_cb[0][4], "g", label="3")
line4, = plt.plot(bp_kj_cb[0][6], "y", label="4")
plt.axhline(y=0,ls=":",c="blue")
plt.axhline(y=1,ls=":",c="blue")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
plt.show()
plt.figure(figsize=(10, 10))
ax2 = plt.subplot(311)
ax2.set_title("KQ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_kq_cb[0][0], "b", label="1")
line2, = plt.plot(bp_kq_cb[0][2], "r", label="2")
line3, = plt.plot(bp_kq_cb[0][4], "g", label="3")
line4, = plt.plot(bp_kq_cb[0][6], "y", label="4")
plt.axhline(y=0,ls=":",c="blue")
plt.axhline(y=1,ls=":",c="blue")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(312)
ax2.set_title("QJ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_qj_cb[0][0], "b", label="1")
line2, = plt.plot(bp_qj_cb[0][2], "r", label="2")
line3, = plt.plot(bp_qj_cb[0][4], "g", label="3")
line4, = plt.plot(bp_qj_cb[0][6], "y", label="4")
plt.axhline(y=1/3,ls=":",c="yellow")
plt.axhline(y=2/3,ls=":",c="yellow")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(313)
ax2.set_title("QK")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_qk_cb[0][0], "b", label="1")
line2, = plt.plot(bp_qk_cb[0][2], "r", label="2")
line3, = plt.plot(bp_qk_cb[0][4], "g", label="3")
line4, = plt.plot(bp_qk_cb[0][6], "y", label="4")
plt.axhline(y=1/3,ls=":",c="yellow")
plt.axhline(y=2/3,ls=":",c="yellow")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
plt.show()
    # plt bp for kuhn_poker_nfsp_1, eta0 in evaluation
plt.figure(figsize=(10, 10))
ax2 = plt.subplot(311)
ax2.set_title("JK (kuhn_poker_nfsp_1, eta0 in evaluation)")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_jk_cb[1][0], "b", label="1")
line2, = plt.plot(bp_jk_cb[1][2], "r", label="2")
line3, = plt.plot(bp_jk_cb[1][4], "g", label="3")
line4, = plt.plot(bp_jk_cb[1][6], "y", label="4")
plt.axhline(y=2/3,ls=":",c="blue")
plt.axhline(y=1,ls=":",c="blue")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(312)
ax2.set_title("JQ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_jq_cb[1][0], "b", label="1")
line2, = plt.plot(bp_jq_cb[1][2], "r", label="2")
line3, = plt.plot(bp_jq_cb[1][4], "g", label="3")
line4, = plt.plot(bp_jq_cb[1][6], "y", label="4")
plt.axhline(y=2/3,ls=":",c="blue")
plt.axhline(y=1,ls=":",c="blue")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(313)
ax2.set_title("KJ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_kj_cb[1][0], "b", label="1")
line2, = plt.plot(bp_kj_cb[1][2], "r", label="2")
line3, = plt.plot(bp_kj_cb[1][4], "g", label="3")
line4, = plt.plot(bp_kj_cb[1][6], "y", label="4")
plt.axhline(y=0,ls=":",c="blue")
plt.axhline(y=1,ls=":",c="blue")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
plt.show()
plt.figure(figsize=(10, 10))
ax2 = plt.subplot(311)
ax2.set_title("KQ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_kq_cb[1][0], "b", label="1")
line2, = plt.plot(bp_kq_cb[1][2], "r", label="2")
line3, = plt.plot(bp_kq_cb[1][4], "g", label="3")
line4, = plt.plot(bp_kq_cb[1][6], "y", label="4")
plt.axhline(y=0,ls=":",c="blue")
plt.axhline(y=1,ls=":",c="blue")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(312)
ax2.set_title("QJ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_qj_cb[1][0], "b", label="1")
line2, = plt.plot(bp_qj_cb[1][2], "r", label="2")
line3, = plt.plot(bp_qj_cb[1][4], "g", label="3")
line4, = plt.plot(bp_qj_cb[1][6], "y", label="4")
plt.axhline(y=1/3,ls=":",c="yellow")
plt.axhline(y=2/3,ls=":",c="yellow")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(313)
ax2.set_title("QK")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_qk_cb[1][0], "b", label="1")
line2, = plt.plot(bp_qk_cb[1][2], "r", label="2")
line3, = plt.plot(bp_qk_cb[1][4], "g", label="3")
line4, = plt.plot(bp_qk_cb[1][6], "y", label="4")
plt.axhline(y=1/3,ls=":",c="yellow")
plt.axhline(y=2/3,ls=":",c="yellow")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
plt.show()
if __name__ == "__main__":
app.run(main)<file_sep># Copyright 2019 DeepMind Technologies Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""NFSP agents trained on Kuhn Poker."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import app
from absl import flags
from absl import logging
import tensorflow.compat.v1 as tf
from open_spiel.python import policy
from open_spiel.python import rl_environment
from open_spiel.python.algorithms import exploitability
from open_spiel.python.algorithms import nfsp
import numpy as np
from open_spiel.python.algorithms import random_agent
FLAGS = flags.FLAGS
flags.DEFINE_integer("num_train_episodes", int(3e6),
"Number of training episodes.")
flags.DEFINE_integer("eval_every", 100,
"Episode frequency at which the agents are evaluated.")
flags.DEFINE_list("hidden_layers_sizes", [
128,
], "Number of hidden units in the avg-net and Q-net.")
flags.DEFINE_integer("replay_buffer_capacity", int(2e5),
"Size of the replay buffer.")
flags.DEFINE_integer("reservoir_buffer_capacity", int(2e6), #
"Size of the reservoir buffer.")
flags.DEFINE_float("anticipatory_param", 0.1,
"Prob of using the rl best response as episode policy.")
flags.DEFINE_integer("save_every", 100,
"Episode frequency at which the agents are evaluated.")
class NFSPPolicies(policy.Policy):
"""Joint policy to be evaluated."""
def __init__(self, env, nfsp_policies, mode):
game = env.game
player_ids = [0, 1]
super(NFSPPolicies, self).__init__(game, player_ids)
self._policies = nfsp_policies
self._mode = mode
self._obs = {"info_state": [None, None], "legal_actions": [None, None]}
def action_probabilities(self, state, player_id=None):
cur_player = state.current_player()
legal_actions = state.legal_actions(cur_player)
self._obs["current_player"] = cur_player
self._obs["info_state"][cur_player] = (
state.observation_tensor(cur_player))
self._obs["legal_actions"][cur_player] = legal_actions
info_state = rl_environment.TimeStep(
observations=self._obs, rewards=None, discounts=None, step_type=None)
with self._policies[cur_player].temp_mode_as(self._mode):
p = self._policies[cur_player].step(info_state, is_evaluation=True).probs
prob_dict = {action: p[action] for action in legal_actions}
return prob_dict
def eval_against_trained_agents(env, trained_agents, num_episodes):
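  """Plays the trained agents against each other for `num_episodes` and
  returns [player 0 win rate, player 1 win rate, draw rate]."""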
wins = np.zeros(3)
for _ in range(num_episodes):
time_step = env.reset()
while not time_step.last():
player_id = time_step.observations["current_player"]
if env.is_turn_based:
agents_output = trained_agents[player_id].step(time_step, is_evaluation=True)
action_list = [agents_output.action]
else:
agents_output = [agent.step(time_step, is_evaluation=True) for agent in trained_agents]
action_list = [agent_output.action for agent_output in agents_output]
time_step = env.step(action_list)
if time_step.rewards[0] > 0:
wins[0] += 1
elif time_step.rewards[1] > 0:
wins[1] += 1
else:
wins[2] += 1
return wins / num_episodes
def eval_against_random_agent1(env, trained_agents, random_agents, num_episodes):
"""Evaluates `trained_agents` against `random_agents` for `num_episodes`."""
wins = np.zeros(3)
cur_agents = [trained_agents[0], random_agents[1]]
for _ in range(num_episodes):
time_step = env.reset()
while not time_step.last():
player_id = time_step.observations["current_player"]
agent_output = cur_agents[player_id].step(time_step, is_evaluation=True)
time_step = env.step([agent_output.action])
if time_step.rewards[0] > 0:
wins[0] += 1
elif time_step.rewards[1] > 0:
wins[1] += 1
else:
wins[2] += 1
return wins / num_episodes
def eval_against_random_agent0(env, trained_agents, random_agents, num_episodes):
"""Evaluates `trained_agents` against `random_agents` for `num_episodes`."""
wins = np.zeros(3)
cur_agents = [random_agents[0], trained_agents[1]]
for _ in range(num_episodes):
time_step = env.reset()
while not time_step.last():
player_id = time_step.observations["current_player"]
agent_output = cur_agents[player_id].step(time_step, is_evaluation=True)
time_step = env.step([agent_output.action])
if time_step.rewards[0] > 0:
wins[0] += 1
elif time_step.rewards[1] > 0:
wins[1] += 1
else:
wins[2] += 1
return wins / num_episodes
def main(unused_argv):
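  """Trains two NFSP agents on tic-tac-toe, periodically evaluating them
  against random agents and against each other, and saving TF checkpoints."""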
game = "tic_tac_toe"
num_players = 2
path = '/home/jxu8/Code_update/open_spiel/sessions_nfsp/tic_tac_toe_0.1'
env_configs = {"players": num_players}
env = rl_environment.Environment(game)
info_state_size = env.observation_spec()["info_state"][0]
num_actions = env.action_spec()["num_actions"]
hidden_layers_sizes = [int(l) for l in FLAGS.hidden_layers_sizes]
kwargs = {
"replay_buffer_capacity": FLAGS.replay_buffer_capacity,
"epsilon_decay_duration": FLAGS.num_train_episodes,
"epsilon_start": 0.06,
"epsilon_end": 0.001,
}
random_agents = [
random_agent.RandomAgent(player_id=idx, num_actions=num_actions)
for idx in range(num_players)
]
with tf.Session() as sess:
# pylint: disable=g-complex-comprehension
agents = [
nfsp.NFSP(sess, idx, info_state_size, num_actions, hidden_layers_sizes,
FLAGS.reservoir_buffer_capacity, FLAGS.anticipatory_param,
**kwargs) for idx in range(num_players)
]
expl_policies_avg = NFSPPolicies(env, agents, nfsp.MODE.average_policy)
saver = tf.train.Saver(max_to_keep = 2000)
sess.run(tf.global_variables_initializer())
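    # Training loop: in each episode every NFSP agent samples either its
    # best-response (DQN) policy, with probability FLAGS.anticipatory_param,
    # or its average (supervised) policy otherwise.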
for ep in range(FLAGS.num_train_episodes):
if (ep + 1) % FLAGS.eval_every == 0:
losses = [agent.loss for agent in agents]
win_rates_against_random_agent1 = eval_against_random_agent1(env, agents, random_agents, 1000)
win_rates_against_random_agent0 = eval_against_random_agent0(env, agents, random_agents, 1000)
win_rates_against_eachother = eval_against_trained_agents(env, agents, 1000)
logging.info("Losses: %s", losses)
# expl = exploitability.exploitability(env.game, expl_policies_avg)
# logging.info("[%s] Exploitability AVG %s", ep + 1, expl)
# logging.info("_____________________________________________")
logging.info("Win rates: Trained_agent0 vs Random_agent1 %s", win_rates_against_random_agent1)
logging.info("Win rates: Random_agent0 vs Trained_agent1 %s", win_rates_against_random_agent0)
logging.info("Win rates: Trained_agent0 vs Trained_agent1 %s", win_rates_against_eachother)
logging.info("__________________________________________________________________")
time_step = env.reset()
while not time_step.last():
player_id = time_step.observations["current_player"]
agent_output = agents[player_id].step(time_step)
action_list = [agent_output.action]
time_step = env.step(action_list)
# Episode is over, step all agents with final info state.
for agent in agents:
agent.step(time_step)
if (ep + 1) % FLAGS.save_every == 0:
saver.save(sess, path + "/episode-" + str(ep+1) + "/trained_model", global_step = 100)
if __name__ == "__main__":
app.run(main)
<file_sep>from absl import app
from absl import flags
import numpy as np
from matplotlib.legend_handler import HandlerLine2D
import matplotlib.pyplot as plt
def read_wr(txt_name):
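    """Reads a win-rate log with three space-separated values per line
    (player 0 wins, player 1 wins, draws) and returns them as three lists."""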
text_file = open(txt_name, "r")
lines = text_file.read().split("\n")
list1 = []
list2 = []
list3 = []
for line in lines[:-1]:
[str1, str2, str3] = line.split(" ")
list1.append(float(str1))
list2.append(float(str2))
list3.append(float(str3))
return list1, list2, list3
def read_exploitability(txt_name):
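    """Reads one exploitability value per line; "NaN" entries are mapped to 1."""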
txt_file = open(txt_name, 'r')
lines = txt_file.read().split('\n')
num_list = []
    for value in lines[:-1]:
        if value == "NaN":
            num_list.append(1)
        else:
            num_list.append(float(value))
return num_list
def read_loss(txt_name):
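    """Reads two space-separated loss values per line (supervised and
    reinforcement learning losses); 'None' entries are replaced by the next
    entry's value."""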
txt_file = open(txt_name)
lines = txt_file.read().split('\n')
list1 = []
list2 = []
for line in lines[:-1]:
[str1, str2] = line.split(' ')
if str1 != 'None':
list1.append(float(str1))
else:
list1.append(str1)
if str2 != 'None':
list2.append(float(str2))
else:
list2.append(str2)
for idx, number in enumerate(list1):
if number == 'None':
list1[idx] = list1[idx+1]
    for idx, number in enumerate(list2):
if number == 'None':
list2[idx] = list2[idx+1]
return list1, list2
def read_behavior_probs(txt_name):
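    """Reads eight space-separated behavior probabilities per line and returns
    them as eight lists, one per tracked action/decision point."""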
text_file = open(txt_name, "r")
lines = text_file.read().split("\n")
list1 = []
list2 = []
list3 = []
list4 = []
list5 = []
list6 = []
list7 = []
list8 = []
for line in lines[:-1]:
[str1, str2, str3, str4, str5, str6, str7, str8] = line.split(" ")
list1.append(float(str1))
list2.append(float(str2))
list3.append(float(str3))
list4.append(float(str4))
list5.append(float(str5))
list6.append(float(str6))
list7.append(float(str7))
list8.append(float(str8))
return list1, list2, list3, list4, list5, list6, list7, list8
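# Hypothetical helper (a sketch, not referenced by main() below): the per-hand
# subplot blocks in main() all repeat the same pattern and could be factored
# like this.  `probs` is assumed to be a list of the four tracked probability
# series for one deal, e.g. [bp_jk_pb[0][0], bp_jk_pb[0][2], bp_jk_pb[0][4],
# bp_jk_pb[0][6]].
def plot_hand(ax, probs, title, hlines=(), colors=("b", "r", "g", "y")):
    ax.set_title(title)
    handles = []
    for idx, (series, color) in enumerate(zip(probs, colors), start=1):
        line, = ax.plot(series, color, label=str(idx))
        handles.append(line)
    for y in hlines:
        ax.axhline(y=y, ls=":", c="blue")
    ax.set_yticks(np.arange(0, 1.1, 0.1))
    ax.legend(handles=handles, loc='upper right')
    ax.set_ylabel('behavior_probs')
    ax.set_xlabel('episode(*1e4)')
    return handles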
def main(argv):
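    """Loads the logged per-deal behavior probabilities for the Kuhn poker NFSP
    runs and plots alpha estimates and the raw probabilities over training."""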
kuhn_poker_nfsp_0 = "/home/jxu8/Code_update/open_spiel/evaluation_data/eval_kp_nfsp_0.1_7_27/"
kuhn_poker_nfsp_1 = "/home/jxu8/Code_update/open_spiel/evaluation_data/eval_kp_nfsp_1_7_28/"
ttt_nfsp_0 = "/home/jxu8/Code_update/open_spiel/evaluation_data/eval_ttt_nfsp_0.1_7_26/"
ttt_nfsp_1 = "/home/jxu8/Code_update/open_spiel/evaluation_data/eval_ttt_nfsp_1_7_29/"
kuhn_poker_psro = "/home/jxu8/Code/open_spiel/evaluation_data/eval_kuhn_poker_psro_7_2/"
bp_jk_pb = []
bp_jq_pb = []
bp_kj_pb = []
bp_kq_pb = []
bp_qj_pb = []
bp_qk_pb = []
bp_jk_pb.append(read_behavior_probs(kuhn_poker_nfsp_0 + 'behavior_probs/eta_0.1/policy_based/JK.txt'))
bp_jq_pb.append(read_behavior_probs(kuhn_poker_nfsp_0 + 'behavior_probs/eta_0.1/policy_based/JQ.txt'))
bp_kj_pb.append(read_behavior_probs(kuhn_poker_nfsp_0 + 'behavior_probs/eta_0.1/policy_based/KJ.txt'))
bp_kq_pb.append(read_behavior_probs(kuhn_poker_nfsp_0 + 'behavior_probs/eta_0.1/policy_based/KQ.txt'))
bp_qj_pb.append(read_behavior_probs(kuhn_poker_nfsp_0 + 'behavior_probs/eta_0.1/policy_based/QJ.txt'))
bp_qk_pb.append(read_behavior_probs(kuhn_poker_nfsp_0 + 'behavior_probs/eta_0.1/policy_based/QK.txt'))
bp_jk_pb.append(read_behavior_probs(kuhn_poker_nfsp_0 + 'behavior_probs/eta_1/policy_based/JK.txt'))
bp_jq_pb.append(read_behavior_probs(kuhn_poker_nfsp_0 + 'behavior_probs/eta_1/policy_based/JQ.txt'))
bp_kj_pb.append(read_behavior_probs(kuhn_poker_nfsp_0 + 'behavior_probs/eta_1/policy_based/KJ.txt'))
bp_kq_pb.append(read_behavior_probs(kuhn_poker_nfsp_0 + 'behavior_probs/eta_1/policy_based/KQ.txt'))
bp_qj_pb.append(read_behavior_probs(kuhn_poker_nfsp_0 + 'behavior_probs/eta_1/policy_based/QJ.txt'))
bp_qk_pb.append(read_behavior_probs(kuhn_poker_nfsp_0 + 'behavior_probs/eta_1/policy_based/QK.txt'))
bp_jk_pb.append(read_behavior_probs(kuhn_poker_nfsp_1 + 'behavior_probs/eta_0.1/policy_based/JK.txt'))
bp_jq_pb.append(read_behavior_probs(kuhn_poker_nfsp_1 + 'behavior_probs/eta_0.1/policy_based/JQ.txt'))
bp_kj_pb.append(read_behavior_probs(kuhn_poker_nfsp_1 + 'behavior_probs/eta_0.1/policy_based/KJ.txt'))
bp_kq_pb.append(read_behavior_probs(kuhn_poker_nfsp_1 + 'behavior_probs/eta_0.1/policy_based/KQ.txt'))
bp_qj_pb.append(read_behavior_probs(kuhn_poker_nfsp_1 + 'behavior_probs/eta_0.1/policy_based/QJ.txt'))
bp_qk_pb.append(read_behavior_probs(kuhn_poker_nfsp_1 + 'behavior_probs/eta_0.1/policy_based/QK.txt'))
bp_jk_pb.append(read_behavior_probs(kuhn_poker_nfsp_1 + 'behavior_probs/eta_1/policy_based/JK.txt'))
bp_jq_pb.append(read_behavior_probs(kuhn_poker_nfsp_1 + 'behavior_probs/eta_1/policy_based/JQ.txt'))
bp_kj_pb.append(read_behavior_probs(kuhn_poker_nfsp_1 + 'behavior_probs/eta_1/policy_based/KJ.txt'))
bp_kq_pb.append(read_behavior_probs(kuhn_poker_nfsp_1 + 'behavior_probs/eta_1/policy_based/KQ.txt'))
bp_qj_pb.append(read_behavior_probs(kuhn_poker_nfsp_1 + 'behavior_probs/eta_1/policy_based/QJ.txt'))
bp_qk_pb.append(read_behavior_probs(kuhn_poker_nfsp_1 + 'behavior_probs/eta_1/policy_based/QK.txt'))
bp_jk_pb.append(read_behavior_probs(kuhn_poker_nfsp_0 + 'behavior_probs/eta_0/policy_based/JK.txt'))
bp_jq_pb.append(read_behavior_probs(kuhn_poker_nfsp_0 + 'behavior_probs/eta_0/policy_based/JQ.txt'))
bp_kj_pb.append(read_behavior_probs(kuhn_poker_nfsp_0 + 'behavior_probs/eta_0/policy_based/KJ.txt'))
bp_kq_pb.append(read_behavior_probs(kuhn_poker_nfsp_0 + 'behavior_probs/eta_0/policy_based/KQ.txt'))
bp_qj_pb.append(read_behavior_probs(kuhn_poker_nfsp_0 + 'behavior_probs/eta_0/policy_based/QJ.txt'))
bp_qk_pb.append(read_behavior_probs(kuhn_poker_nfsp_0 + 'behavior_probs/eta_0/policy_based/QK.txt'))
bp_jk_pb.append(read_behavior_probs(kuhn_poker_nfsp_1 + 'behavior_probs/eta_0/policy_based/JK.txt'))
bp_jq_pb.append(read_behavior_probs(kuhn_poker_nfsp_1 + 'behavior_probs/eta_0/policy_based/JQ.txt'))
bp_kj_pb.append(read_behavior_probs(kuhn_poker_nfsp_1 + 'behavior_probs/eta_0/policy_based/KJ.txt'))
bp_kq_pb.append(read_behavior_probs(kuhn_poker_nfsp_1 + 'behavior_probs/eta_0/policy_based/KQ.txt'))
bp_qj_pb.append(read_behavior_probs(kuhn_poker_nfsp_1 + 'behavior_probs/eta_0/policy_based/QJ.txt'))
bp_qk_pb.append(read_behavior_probs(kuhn_poker_nfsp_1 + 'behavior_probs/eta_0/policy_based/QK.txt'))
#plt alpha in kuhn_poker_nfsp_0.1(eta 0)
tmp_list = [bp_jk_pb[4], bp_jq_pb[4], bp_kj_pb[4], bp_kq_pb[4], bp_qj_pb[4], bp_qk_pb[4]]
alpha_1 = [1 - tmp_list[0][0][i] for i in range(len(tmp_list[0][0]))]
alpha_2 = [1 - tmp_list[1][0][i] for i in range(len(tmp_list[1][0]))]
alpha_3 = [(1/3) * (1 - tmp_list[2][0][i]) for i in range(len(tmp_list[2][0]))]
    alpha_4 = [(1/3) * (1 - tmp_list[3][0][i]) for i in range(len(tmp_list[3][0]))]
alpha_5 = [tmp_list[4][7][i] - 1/3 for i in range(len(tmp_list[4][7]))]
alpha_6 = [tmp_list[5][7][i] - 1/3 for i in range(len(tmp_list[5][7]))]
ax2 = plt.figure(figsize=(10, 5))
#ax2.set_title("JK (kuhn_poker_nfsp_0.1, eta0.1 in evaluation)")
#plt.ylim(0, 0.35)
line1, = plt.plot(alpha_1, "b-", label="JK & JQ")
#line2, = plt.plot(alpha_2, "b*", label="2")
line3, = plt.plot(alpha_3, "g-", label="KJ & KQ")
#line4, = plt.plot(alpha_4, "g*", label="4")
line5, = plt.plot(alpha_5, "y-", label="QJ & QK")
#line6, = plt.plot(alpha_6, "y*", label="6")
#plt.legend(handles=[line1, line2, line3, line4, line5, line6], loc='upper right')
plt.legend(handles=[line1, line3, line5], loc='upper right')
plt.ylabel('alpha')
plt.xlabel('episode(*1e4)')
plt.show()
#plt alpha in kuhn_poker_nfsp_1(eta 0)
tmp_list = [bp_jk_pb[5], bp_jq_pb[5], bp_kj_pb[5], bp_kq_pb[5], bp_qj_pb[5], bp_qk_pb[5]]
alpha_1 = [1 - tmp_list[0][0][i] for i in range(len(tmp_list[0][0]))]
alpha_2 = [1 - tmp_list[1][0][i] for i in range(len(tmp_list[1][0]))]
alpha_3 = [(1/3) * (1 - tmp_list[2][0][i]) for i in range(len(tmp_list[2][0]))]
    alpha_4 = [(1/3) * (1 - tmp_list[3][0][i]) for i in range(len(tmp_list[3][0]))]
alpha_5 = [tmp_list[4][7][i] - 1/3 for i in range(len(tmp_list[4][7]))]
alpha_6 = [tmp_list[5][7][i] - 1/3 for i in range(len(tmp_list[5][7]))]
ax2 = plt.figure(figsize=(10, 5))
#ax2.set_title("JK (kuhn_poker_nfsp_0.1, eta0.1 in evaluation)")
#plt.ylim(0, 0.35)
line1, = plt.plot(alpha_1, "b-", label="JK & JQ")
#line2, = plt.plot(alpha_2, "b*", label="2")
line3, = plt.plot(alpha_3, "g-", label="KJ & KQ")
#line4, = plt.plot(alpha_4, "g*", label="4")
line5, = plt.plot(alpha_5, "y-", label="QJ & QK")
#line6, = plt.plot(alpha_6, "y*", label="6")
#plt.legend(handles=[line1, line2, line3, line4, line5, line6], loc='upper right')
plt.legend(handles=[line1, line3, line5], loc='upper right')
plt.ylabel('alpha')
plt.xlabel('episode(*1e4)')
plt.show()
plt.figure(figsize=(10, 10))
ax2 = plt.subplot(311)
ax2.set_title("JK (kuhn_poker_nfsp_0.1, eta0.1 in evaluation)")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_jk_pb[0][0], "b", label="1")
line2, = plt.plot(bp_jk_pb[0][2], "r", label="2")
line3, = plt.plot(bp_jk_pb[0][4], "g", label="3")
line4, = plt.plot(bp_jk_pb[0][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(312)
ax2.set_title("JQ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_jq_pb[0][0], "b", label="1")
line2, = plt.plot(bp_jq_pb[0][2], "r", label="2")
line3, = plt.plot(bp_jq_pb[0][4], "g", label="3")
line4, = plt.plot(bp_jq_pb[0][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(313)
ax2.set_title("KJ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_kj_pb[0][0], "b", label="1")
line2, = plt.plot(bp_kj_pb[0][2], "r", label="2")
line3, = plt.plot(bp_kj_pb[0][4], "g", label="3")
line4, = plt.plot(bp_kj_pb[0][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
plt.show()
plt.figure(figsize=(10, 10))
ax2 = plt.subplot(311)
ax2.set_title("KQ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_kq_pb[0][0], "b", label="1")
line2, = plt.plot(bp_kq_pb[0][2], "r", label="2")
line3, = plt.plot(bp_kq_pb[0][4], "g", label="3")
line4, = plt.plot(bp_kq_pb[0][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(312)
ax2.set_title("QJ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_qj_pb[0][0], "b", label="1")
line2, = plt.plot(bp_qj_pb[0][2], "r", label="2")
line3, = plt.plot(bp_qj_pb[0][4], "g", label="3")
line4, = plt.plot(bp_qj_pb[0][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(313)
ax2.set_title("QK")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_qk_pb[0][0], "b", label="1")
line2, = plt.plot(bp_qk_pb[0][2], "r", label="2")
line3, = plt.plot(bp_qk_pb[0][4], "g", label="3")
line4, = plt.plot(bp_qk_pb[0][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
plt.show()
# plt bp for kuhn_poker_nfsp_0.1, eta1 in evaluation
plt.figure(figsize=(10, 10))
ax2 = plt.subplot(311)
ax2.set_title("JK (kuhn_poker_nfsp_0.1, eta1 in evaluation)")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_jk_pb[1][0], "b", label="1")
line2, = plt.plot(bp_jk_pb[1][2], "r", label="2")
line3, = plt.plot(bp_jk_pb[1][4], "g", label="3")
line4, = plt.plot(bp_jk_pb[1][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(312)
ax2.set_title("JQ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_jq_pb[1][0], "b", label="1")
line2, = plt.plot(bp_jq_pb[1][2], "r", label="2")
line3, = plt.plot(bp_jq_pb[1][4], "g", label="3")
line4, = plt.plot(bp_jq_pb[1][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(313)
ax2.set_title("KJ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_kj_pb[1][0], "b", label="1")
line2, = plt.plot(bp_kj_pb[1][2], "r", label="2")
line3, = plt.plot(bp_kj_pb[1][4], "g", label="3")
line4, = plt.plot(bp_kj_pb[1][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
plt.show()
plt.figure(figsize=(10, 10))
ax2 = plt.subplot(311)
ax2.set_title("KQ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_kq_pb[1][0], "b", label="1")
line2, = plt.plot(bp_kq_pb[1][2], "r", label="2")
line3, = plt.plot(bp_kq_pb[1][4], "g", label="3")
line4, = plt.plot(bp_kq_pb[1][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(312)
ax2.set_title("QJ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_qj_pb[1][0], "b", label="1")
line2, = plt.plot(bp_qj_pb[1][2], "r", label="2")
line3, = plt.plot(bp_qj_pb[1][4], "g", label="3")
line4, = plt.plot(bp_qj_pb[1][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(313)
ax2.set_title("QK")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_qk_pb[1][0], "b", label="1")
line2, = plt.plot(bp_qk_pb[1][2], "r", label="2")
line3, = plt.plot(bp_qk_pb[1][4], "g", label="3")
line4, = plt.plot(bp_qk_pb[1][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
plt.show()
plt.figure(figsize=(10, 10))
ax2 = plt.subplot(311)
ax2.set_title("JK (kuhn_poker_nfsp_0.1, eta0 in evaluation)")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_jk_pb[4][0], "b", label="1")
line2, = plt.plot(bp_jk_pb[4][2], "r", label="2")
line3, = plt.plot(bp_jk_pb[4][4], "g", label="3")
line4, = plt.plot(bp_jk_pb[4][6], "y", label="4")
plt.axhline(y=2/3,ls=":",c="blue")
plt.axhline(y=1,ls=":",c="blue")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(312)
ax2.set_title("JQ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_jq_pb[4][0], "b", label="1")
line2, = plt.plot(bp_jq_pb[4][2], "r", label="2")
line3, = plt.plot(bp_jq_pb[4][4], "g", label="3")
line4, = plt.plot(bp_jq_pb[4][6], "y", label="4")
plt.axhline(y=2/3,ls=":",c="blue")
plt.axhline(y=1,ls=":",c="blue")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(313)
ax2.set_title("KJ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_kj_pb[4][0], "b", label="1")
line2, = plt.plot(bp_kj_pb[4][2], "r", label="2")
line3, = plt.plot(bp_kj_pb[4][4], "g", label="3")
line4, = plt.plot(bp_kj_pb[4][6], "y", label="4")
plt.axhline(y=0,ls=":",c="blue")
plt.axhline(y=1,ls=":",c="blue")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
plt.show()
plt.figure(figsize=(10, 10))
ax2 = plt.subplot(311)
ax2.set_title("KQ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_kq_pb[4][0], "b", label="1")
line2, = plt.plot(bp_kq_pb[4][2], "r", label="2")
line3, = plt.plot(bp_kq_pb[4][4], "g", label="3")
line4, = plt.plot(bp_kq_pb[4][6], "y", label="4")
plt.axhline(y=0,ls=":",c="blue")
plt.axhline(y=1,ls=":",c="blue")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(312)
ax2.set_title("QJ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_qj_pb[4][0], "b", label="1")
line2, = plt.plot(bp_qj_pb[4][2], "r", label="2")
line3, = plt.plot(bp_qj_pb[4][4], "g", label="3")
line4, = plt.plot(bp_qj_pb[4][6], "y", label="4")
plt.axhline(y=1/3,ls=":",c="yellow")
plt.axhline(y=2/3,ls=":",c="yellow")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(313)
ax2.set_title("QK")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_qk_pb[4][0], "b", label="1")
line2, = plt.plot(bp_qk_pb[4][2], "r", label="2")
line3, = plt.plot(bp_qk_pb[4][4], "g", label="3")
line4, = plt.plot(bp_qk_pb[4][6], "y", label="4")
plt.axhline(y=1/3,ls=":",c="yellow")
plt.axhline(y=2/3,ls=":",c="yellow")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
plt.show()
plt.figure(figsize=(10, 10))
ax2 = plt.subplot(311)
ax2.set_title("JK (kuhn_poker_nfsp_1, eta0.1 in evaluation)")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_jk_pb[2][0], "b", label="1")
line2, = plt.plot(bp_jk_pb[2][2], "r", label="2")
line3, = plt.plot(bp_jk_pb[2][4], "g", label="3")
line4, = plt.plot(bp_jk_pb[2][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(312)
ax2.set_title("JQ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_jq_pb[2][0], "b", label="1")
line2, = plt.plot(bp_jq_pb[2][2], "r", label="2")
line3, = plt.plot(bp_jq_pb[2][4], "g", label="3")
line4, = plt.plot(bp_jq_pb[2][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(313)
ax2.set_title("KJ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_kj_pb[2][0], "b", label="1")
line2, = plt.plot(bp_kj_pb[2][2], "r", label="2")
line3, = plt.plot(bp_kj_pb[2][4], "g", label="3")
line4, = plt.plot(bp_kj_pb[2][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
plt.show()
plt.figure(figsize=(10, 10))
ax2 = plt.subplot(311)
ax2.set_title("KQ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_kq_pb[2][0], "b", label="1")
line2, = plt.plot(bp_kq_pb[2][2], "r", label="2")
line3, = plt.plot(bp_kq_pb[2][4], "g", label="3")
line4, = plt.plot(bp_kq_pb[2][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(312)
ax2.set_title("QJ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_qj_pb[2][0], "b", label="1")
line2, = plt.plot(bp_qj_pb[2][2], "r", label="2")
line3, = plt.plot(bp_qj_pb[2][4], "g", label="3")
line4, = plt.plot(bp_qj_pb[2][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(313)
ax2.set_title("QK")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_qk_pb[2][0], "b", label="1")
line2, = plt.plot(bp_qk_pb[2][2], "r", label="2")
line3, = plt.plot(bp_qk_pb[2][4], "g", label="3")
line4, = plt.plot(bp_qk_pb[2][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
plt.show()
plt.figure(figsize=(10, 10))
ax2 = plt.subplot(311)
ax2.set_title("JK (kuhn_poker_nfsp_1, eta1 in evaluation)")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_jk_pb[3][0], "b", label="1")
line2, = plt.plot(bp_jk_pb[3][2], "r", label="2")
line3, = plt.plot(bp_jk_pb[3][4], "g", label="3")
line4, = plt.plot(bp_jk_pb[3][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(312)
ax2.set_title("JQ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_jq_pb[3][0], "b", label="1")
line2, = plt.plot(bp_jq_pb[3][2], "r", label="2")
line3, = plt.plot(bp_jq_pb[3][4], "g", label="3")
line4, = plt.plot(bp_jq_pb[3][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(313)
ax2.set_title("KJ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_kj_pb[3][0], "b", label="1")
line2, = plt.plot(bp_kj_pb[3][2], "r", label="2")
line3, = plt.plot(bp_kj_pb[3][4], "g", label="3")
line4, = plt.plot(bp_kj_pb[3][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
plt.show()
plt.figure(figsize=(10, 10))
ax2 = plt.subplot(311)
ax2.set_title("KQ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_kq_pb[3][0], "b", label="1")
line2, = plt.plot(bp_kq_pb[3][2], "r", label="2")
line3, = plt.plot(bp_kq_pb[3][4], "g", label="3")
line4, = plt.plot(bp_kq_pb[3][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(312)
ax2.set_title("QJ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_qj_pb[3][0], "b", label="1")
line2, = plt.plot(bp_qj_pb[3][2], "r", label="2")
line3, = plt.plot(bp_qj_pb[3][4], "g", label="3")
line4, = plt.plot(bp_qj_pb[3][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(313)
ax2.set_title("QK")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_qk_pb[3][0], "b", label="1")
line2, = plt.plot(bp_qk_pb[3][2], "r", label="2")
line3, = plt.plot(bp_qk_pb[3][4], "g", label="3")
line4, = plt.plot(bp_qk_pb[3][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
plt.show()
plt.figure(figsize=(10, 10))
ax2 = plt.subplot(311)
ax2.set_title("JK (kuhn_poker_nfsp_1, eta0 in evaluation)")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_jk_pb[5][0], "b", label="1")
line2, = plt.plot(bp_jk_pb[5][2], "r", label="2")
line3, = plt.plot(bp_jk_pb[5][4], "g", label="3")
line4, = plt.plot(bp_jk_pb[5][6], "y", label="4")
plt.axhline(y=2/3,ls=":",c="blue")
plt.axhline(y=1,ls=":",c="blue")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(312)
ax2.set_title("JQ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_jq_pb[5][0], "b", label="1")
line2, = plt.plot(bp_jq_pb[5][2], "r", label="2")
line3, = plt.plot(bp_jq_pb[5][4], "g", label="3")
line4, = plt.plot(bp_jq_pb[5][6], "y", label="4")
plt.axhline(y=2/3,ls=":",c="blue")
plt.axhline(y=1,ls=":",c="blue")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(313)
ax2.set_title("KJ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_kj_pb[5][0], "b", label="1")
line2, = plt.plot(bp_kj_pb[5][2], "r", label="2")
line3, = plt.plot(bp_kj_pb[5][4], "g", label="3")
line4, = plt.plot(bp_kj_pb[5][6], "y", label="4")
plt.axhline(y=0,ls=":",c="blue")
plt.axhline(y=1,ls=":",c="blue")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
plt.show()
plt.figure(figsize=(10, 10))
ax2 = plt.subplot(311)
ax2.set_title("KQ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_kq_pb[5][0], "b", label="1")
line2, = plt.plot(bp_kq_pb[5][2], "r", label="2")
line3, = plt.plot(bp_kq_pb[5][4], "g", label="3")
line4, = plt.plot(bp_kq_pb[5][6], "y", label="4")
plt.axhline(y=0,ls=":",c="blue")
plt.axhline(y=1,ls=":",c="blue")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(312)
ax2.set_title("QJ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_qj_pb[5][0], "b", label="1")
line2, = plt.plot(bp_qj_pb[5][2], "r", label="2")
line3, = plt.plot(bp_qj_pb[5][4], "g", label="3")
line4, = plt.plot(bp_qj_pb[5][6], "y", label="4")
plt.axhline(y=1/3,ls=":",c="yellow")
plt.axhline(y=2/3,ls=":",c="yellow")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(313)
ax2.set_title("QK")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_qk_pb[5][0], "b", label="1")
line2, = plt.plot(bp_qk_pb[5][2], "r", label="2")
line3, = plt.plot(bp_qk_pb[5][4], "g", label="3")
line4, = plt.plot(bp_qk_pb[5][6], "y", label="4")
plt.axhline(y=1/3,ls=":",c="yellow")
plt.axhline(y=2/3,ls=":",c="yellow")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
plt.show()
if __name__ == "__main__":
app.run(main)<file_sep>from absl import app
from absl import flags
import numpy as np
from matplotlib.legend_handler import HandlerLine2D
import matplotlib.pyplot as plt
def read_wr(txt_name):
text_file = open(txt_name, "r")
lines = text_file.read().split("\n")
list1 = []
list2 = []
list3 = []
for line in lines[:-1]:
[str1, str2, str3] = line.split(" ")
list1.append(float(str1))
list2.append(float(str2))
list3.append(float(str3))
return list1, list2, list3
def read_exploitability(txt_name):
txt_file = open(txt_name, 'r')
lines = txt_file.read().split('\n')
num_list = []
    for value in lines[:-1]:
        if value == "NaN":
            num_list.append(1)
        else:
            num_list.append(float(value))
return num_list
def merge_expl(txt_name_1, txt_name_2):
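    """Concatenates the exploitability series from two files (used to stitch
    together a run that was resumed from a checkpoint)."""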
num_list_1 = read_exploitability(txt_name_1)
num_list_2 = read_exploitability(txt_name_2)
return num_list_1 + num_list_2
def read_loss(txt_name):
txt_file = open(txt_name)
lines = txt_file.read().split('\n')
list1 = []
list2 = []
for line in lines[:-1]:
[str1, str2] = line.split(' ')
if str1 != 'None':
list1.append(float(str1))
else:
list1.append(str1)
if str2 != 'None':
list2.append(float(str2))
else:
list2.append(str2)
for idx, number in enumerate(list1):
if number == 'None':
list1[idx] = list1[idx+1]
    for idx, number in enumerate(list2):
if number == 'None':
list2[idx] = list2[idx+1]
return list1, list2
def read_behavior_probs(txt_name):
text_file = open(txt_name, "r")
lines = text_file.read().split("\n")
list1 = []
list2 = []
list3 = []
list4 = []
list5 = []
list6 = []
list7 = []
list8 = []
for line in lines[:-1]:
[str1, str2, str3, str4, str5, str6, str7, str8] = line.split(" ")
list1.append(float(str1))
list2.append(float(str2))
list3.append(float(str3))
list4.append(float(str4))
list5.append(float(str5))
list6.append(float(str6))
list7.append(float(str7))
list8.append(float(str8))
return list1, list2, list3, list4, list5, list6, list7, list8
def main(argv):
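    """Loads exploitability, loss, and win-rate logs for the Kuhn poker and
    tic-tac-toe NFSP runs and plots them."""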
kuhn_poker_nfsp_0 = "/home/jxu8/Code_update/open_spiel/evaluation_data/eval_kp_nfsp_0.1_7_27/"
kuhn_poker_nfsp_1 = "/home/jxu8/Code_update/open_spiel/evaluation_data/eval_kp_nfsp_1_7_28/"
ttt_nfsp_0 = "/home/jxu8/Code_update/open_spiel/evaluation_data/eval_ttt_nfsp_0.1_8_6/"
ttt_nfsp_0_2 = "/home/jxu8/Code_update/open_spiel/evaluation_data/eval_ttt_nfsp_0.2_8_6/"
ttt_nfsp_1 = "/home/jxu8/Code_update/open_spiel/evaluation_data/eval_ttt_nfsp_1_7_29/"
kuhn_poker_psro = "/home/jxu8/Code/open_spiel/evaluation_data/eval_kuhn_poker_psro_7_2/"
expl_0 = []
expl_1 = []
expl_0_2 = []
expl_0_5 = []
expl_0_0_1 = []
loss_agent0 = []
loss_agent1 = []
win_rates_against_random1_eta0 = [] #load win rates against random agent1, trained with eta 0.1
    win_rates_against_random0_eta0 = [] #load win rates against random agent0, trained with eta 0.1
win_rates_against_random1_eta1 = []
win_rates_against_random0_eta1 = []
win_rates_against_eachother_eta0 = [] #load win rates against each other, trained with eta 0.1
win_rates_against_eachother_eta1 = []
expl_0.append(read_exploitability('/home/jxu8/Code_update/open_spiel/sessions_nfsp/kuhn_poker_0.1_7_27/' + 'exploitabilities.txt'))
expl_0.append(merge_expl(ttt_nfsp_0 + 'exploitabilities.txt', "/home/jxu8/Code_update/open_spiel/evaluation_data/eval_ttt_nfsp_0.1_8_12/exploitabilities_from_1506e4.txt"))
expl_1.append(read_exploitability('/home/jxu8/Code_update/open_spiel/sessions_nfsp/kuhn_poker_1_7_28/' + 'exploitability.txt'))
expl_1.append(read_exploitability(ttt_nfsp_1 + 'exploitabilities.txt'))
expl_0_2.append(read_exploitability('/home/jxu8/Code_update/open_spiel/sessions_nfsp/kuhn_poker_0.2_8_6/exploitability.txt'))
expl_0_2.append(merge_expl(ttt_nfsp_0_2 + 'exploitabilities.txt', "/home/jxu8/Code_update/open_spiel/evaluation_data/eval_ttt_nfsp_0.2_8_12/exploitabilities_from_1506e4.txt"))
expl_0_5.append(read_exploitability('/home/jxu8/Code_update/open_spiel/sessions_nfsp/kuhn_poker_0.5_8_6/exploitability.txt'))
expl_0_0_1.append(read_exploitability('/home/jxu8/Code_update/open_spiel/sessions_nfsp/kuhn_poker_0.01_8_6/exploitability.txt'))
loss_agent0.append(read_loss('/home/jxu8/Code_update/open_spiel/sessions_nfsp/kuhn_poker_0.1_7_27/loss_agent0.txt'))
loss_agent1.append(read_loss('/home/jxu8/Code_update/open_spiel/sessions_nfsp/kuhn_poker_0.1_7_27/loss_agent1.txt'))
loss_agent0.append(read_loss('/home/jxu8/Code_update/open_spiel/sessions_nfsp/kuhn_poker_1_7_28/loss_agent0.txt'))
loss_agent1.append(read_loss('/home/jxu8/Code_update/open_spiel/sessions_nfsp/kuhn_poker_1_7_28/loss_agent1.txt'))
loss_agent0.append(read_loss('/home/jxu8/Code_update/open_spiel/sessions_nfsp/tic_tac_toe_0.1_7_26/loss_agent0.txt'))
loss_agent1.append(read_loss('/home/jxu8/Code_update/open_spiel/sessions_nfsp/tic_tac_toe_0.1_7_26/loss_agent1.txt'))
loss_agent0.append(read_loss('/home/jxu8/Code_update/open_spiel/sessions_nfsp/tic_tac_toe_1_7_29/loss_agent0.txt'))
loss_agent1.append(read_loss('/home/jxu8/Code_update/open_spiel/sessions_nfsp/tic_tac_toe_1_7_29/loss_agent1.txt'))
# load win_rates with eta0.1 in training
win_rates_against_random1_eta0.append(read_wr(ttt_nfsp_0 + 'win_rates/eta_0/win_rates_against_random_agent1.txt')) # load win rates of trained agent0 against random agent1, with eta 0 in evaluation process (average policy only)
win_rates_against_random0_eta0.append(read_wr(ttt_nfsp_0 + 'win_rates/eta_0/win_rates_against_random_agent0.txt')) # load win rates of trained agent1 against random agent0, with eta 0 in evaluation process (average policy only)
win_rates_against_random1_eta0.append(read_wr(kuhn_poker_nfsp_0 + 'win_rates/eta_0/win_rates_against_random_agent1.txt')) # load win rates of trained agent0 against random agent1, with eta 0 in evaluation process (average policy only)
win_rates_against_random0_eta0.append(read_wr(kuhn_poker_nfsp_0 + 'win_rates/eta_0/win_rates_against_random_agent0.txt')) # load win rates of trained agent1 against random agent0, with eta 0 in evaluation process (average policy only)
win_rates_against_random1_eta0.append(read_wr(ttt_nfsp_0_2 + 'win_rates/eta_0/win_rates_against_random_agent1.txt'))
win_rates_against_random0_eta0.append(read_wr(ttt_nfsp_0_2 + 'win_rates/eta_0/win_rates_against_random_agent0.txt'))
win_rates_against_eachother_eta0.append(read_wr(ttt_nfsp_0 + 'win_rates/eta_0/win_rates_against_eachother.txt')) # load win rates of trained agent0 against trained agent1, with eta 0 for both in the evaluation process (both use average policy only)
win_rates_against_eachother_eta0.append(read_wr(kuhn_poker_nfsp_0 + 'win_rates/eta_0/win_rates_against_eachother.txt')) # load win rates of trained agent0 against trained agent1, with eta 0 for both in the evaluation process (both use average policy only)
win_rates_against_eachother_eta0.append(read_wr(ttt_nfsp_0_2 + 'win_rates/eta_0/win_rates_against_eachother.txt'))
#load win rates with eta1 in training
win_rates_against_random1_eta1.append(read_wr(ttt_nfsp_1 + 'win_rates/eta_0/win_rates_against_random_agent1.txt')) # load win rates of trained agent0 against random agent1, with eta 0 in evaluation process (average policy only)
win_rates_against_random0_eta1.append(read_wr(ttt_nfsp_1 + 'win_rates/eta_0/win_rates_against_random_agent0.txt')) # load win rates of trained agent1 against random agent0, with eta 0 in evaluation process (average policy only)
win_rates_against_random1_eta1.append(read_wr(kuhn_poker_nfsp_1 + 'win_rates/eta_0/win_rates_against_random_agent1.txt')) # load win rates of trained agent0 against random agent1, with eta 0 in evaluation process (average policy only)
win_rates_against_random0_eta1.append(read_wr(kuhn_poker_nfsp_1 + 'win_rates/eta_0/win_rates_against_random_agent0.txt')) # load win rates of trained agent1 against random agent0, with eta 0 in evaluation process (average policy only)
win_rates_against_eachother_eta1.append(read_wr(ttt_nfsp_1 + 'win_rates/eta_0/win_rates_against_eachother.txt')) # load win rates of trained agent0 against trained agent1, with eta 0 for both in the evaluation process (both use average policy only)
win_rates_against_eachother_eta1.append(read_wr(kuhn_poker_nfsp_1 + 'win_rates/eta_0/win_rates_against_eachother.txt')) # load win rates of trained agent0 against trained agent1, with eta 0 for both in the evaluation process (both use average policy only)
# episode = range(10, 3010, 10)
# plot exploitability, avg utility in kuhn_poker_nfsp_0.1_7_27
plt.figure(figsize=(10, 3))
#plt.subplot(311)
plt.xlim(-10, 300)
y_ticks = np.arange(0, 0.50, 0.05)
line1, = plt.plot(expl_0[0], "b", label="kuhn_poker_nfsp_0.1")
line2, = plt.plot(expl_0_0_1[0], 'm', label='kuhn_poker_nfsp_0.01')
line3, = plt.plot(expl_0_2[0], 'y', label="kuhn_poker_nfsp_0.2")
line4, = plt.plot(expl_0_5[0], 'g', label='kuhn_poker_nfsp_0.5')
line5, = plt.plot(expl_1[0], "r", label="kuhn_poker_nfsp_1")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4, line5], loc='upper right')
plt.ylabel('exploitability')
plt.xlabel('episode(*1e4)')
plt.show()
#plt win rates in kuhn_poker_nfsp_0.1_7_27
plt.figure(figsize=(15, 12))
ax2 = plt.subplot(311)
ax2.set_title("trained agent0 against random agent1(eta 0 in evaluation)")
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_random1_eta0[1][0], 'b', label='kp_nfsp_0.1_trained_agent0')
line2, = plt.plot(win_rates_against_random1_eta0[1][1], 'r', label='kp_nfsp_0.1_random_agent1')
line3, = plt.plot(win_rates_against_random1_eta0[1][2], 'g', label='kp_nfsp_0.1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(312)
ax2.set_title('random agent0 against trained agent1(eta 0 in evaluation)')
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_random0_eta0[1][0], 'r', label='kp_nfsp_0.1_random_agent0')
    line2, = plt.plot(win_rates_against_random0_eta0[1][1], 'b', label='kp_nfsp_0.1_trained_agent1')
line3, = plt.plot(win_rates_against_random0_eta0[1][2], 'g', label='kp_nfsp_0.1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(313)
ax2.set_title("win rates against each other(eta 0 for both in evaluation)")
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_eachother_eta0[1][0], 'b', label='kp_nfsp_0.1_agent0')
line2, = plt.plot(win_rates_against_eachother_eta0[1][1], 'r', label='kp_nfsp_0.1_agent1')
line3, = plt.plot(win_rates_against_eachother_eta0[1][2], 'g', label='kp_nfsp_0.1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
plt.show()
# plot random agent0 against trained agent1 in ttt_nfsp_0.1_7_24
# plot win rates of trained agents against each other in ttt_nfsp_0.1_7_24
# plt win rates in kuhn_poker_nfsp_1_7_28
plt.figure(figsize=(15, 12))
ax2 = plt.subplot(311)
ax2.set_title("trained agent0 against random agent1(eta 0 in evaluation)")
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_random1_eta1[1][0], 'b', label='kp_nfsp_1_trained_agent0')
line2, = plt.plot(win_rates_against_random1_eta1[1][1], 'r', label='kp_nfsp_1_random_agent1')
line3, = plt.plot(win_rates_against_random1_eta1[1][2], 'g', label='kp_nfsp_1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(312)
ax2.set_title('random agent0 against trained agent1(eta 0 in evaluation)')
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_random0_eta1[1][0], 'r', label='kp_nfsp_1_random_agent0')
    line2, = plt.plot(win_rates_against_random0_eta1[1][1], 'b', label='kp_nfsp_1_trained_agent1')
line3, = plt.plot(win_rates_against_random0_eta1[1][2], 'g', label='kp_nfsp_1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(313)
ax2.set_title("win rates against each other(eta 0 for both in evaluation)")
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_eachother_eta1[1][0], 'b', label='kp_nfsp_1_agent0')
line2, = plt.plot(win_rates_against_eachother_eta1[1][1], 'r', label='kp_nfsp_1_agent1')
line3, = plt.plot(win_rates_against_eachother_eta1[1][2], 'g', label='kp_nfsp_1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
plt.show()
# plt.subplot(312)
# line1, = plt.plot(loss_agent0[0][0], "b", label="supervised learning loss")
# line2, = plt.plot(loss_agent0[0][1], "r", label="reinforcement learning loss")
# plt.legend(handles=[line1, line2], loc='upper right')
# plt.ylabel('loss_agent0')
# plt.xlabel('episode(*1e4)')
#
# plt.subplot(313)
# line1, = plt.plot(loss_agent1[0][0], "b", label="supervised learning loss")
# line2, = plt.plot(loss_agent1[0][1], "r", label="reinforcement learning loss")
# plt.legend(handles=[line1, line2], loc='upper right')
# plt.ylabel('loss_agent1')
# plt.xlabel('episode(*1e4)')
# plt.show()
# plot exploitability in tic_tac_toe_nfsp_0.1_7_26, tic_tac_toe_nfsp_1_7_29
plt.figure(figsize=(10,3))
#plt.subplot(311)
plt.ylim(0, 1.05)
y_ticks = np.arange(0, 1.1, 0.1)
x_range_0 = [6*(x+1) for x in range(len(expl_0[1]))]
line1, = plt.plot(x_range_0, expl_0[1], "b", label="ttt_nfsp_0.1")
x_range_1 = [6*(x+1) for x in range(len(expl_0_2[1]))]
line2, = plt.plot(x_range_1, expl_0_2[1], "g", label="ttt_nfsp_0.2")
x_range_2 = [4*(x+1) for x in range(len(expl_1[1]))]
line3, = plt.plot(x_range_2, expl_1[1], "r", label="ttt_nfsp_1")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3], loc='upper right')
plt.ylabel('exploitability')
plt.xlabel('episode(*1e4)')
plt.show()
# plt.subplot(312)
# line1, = plt.plot(loss_agent0[0][0], "b", label="supervised learning loss")
# line2, = plt.plot(loss_agent0[0][1], "r", label="reinforcement learning loss")
# plt.legend(handles=[line1, line2], loc='lower right')
# plt.ylabel('loss_agent0')
# plt.xlabel('episode(*1e4)')
#
# plt.subplot(313)
# line1, = plt.plot(loss_agent1[0][0], "b", label="supervised learning loss")
# line2, = plt.plot(loss_agent1[0][1], "r", label="reinforcement learning loss")
# plt.legend(handles=[line1, line2], loc='lower right')
# plt.ylabel('loss_agent1')
# plt.xlabel('episode(*1e4)')
# plt.show()
# plot trained agent0 against random agent1 in tic_tac_toe_nfsp_0.1_7_26
plt.figure(figsize=(15, 12))
ax2 = plt.subplot(311)
ax2.set_title("trained agent0 against random agent1(eta 0 in evaluation)")
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_random1_eta0[0][0], 'b', label='ttt_nfsp_0.1_trained_agent0')
line2, = plt.plot(win_rates_against_random1_eta0[0][1], 'r', label='ttt_nfsp_0.1_random_agent1')
line3, = plt.plot(win_rates_against_random1_eta0[0][2], 'g', label='ttt_nfsp_0.1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(312)
ax2.set_title('random agent0 against trained agent1(eta 0 in evaluation)')
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_random0_eta0[0][0], 'r', label='ttt_nfsp_0.1_random_agent0')
    line2, = plt.plot(win_rates_against_random0_eta0[0][1], 'b', label='ttt_nfsp_0.1_trained_agent1')
line3, = plt.plot(win_rates_against_random0_eta0[0][2], 'g', label='ttt_nfsp_0.1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(313)
ax2.set_title("win rates against each other(eta 0 for both in evaluation)")
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_eachother_eta0[0][0], 'b', label='ttt_nfsp_0.1_agent0')
line2, = plt.plot(win_rates_against_eachother_eta0[0][1], 'r', label='ttt_nfsp_0.1_agent1')
line3, = plt.plot(win_rates_against_eachother_eta0[0][2], 'g', label='ttt_nfsp_0.1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
plt.show()
plt.figure(figsize=(15, 12))
ax2 = plt.subplot(311)
ax2.set_title("trained agent0 against random agent1(eta 0 in evaluation)")
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_random1_eta0[2][0], 'b', label='ttt_nfsp_0.2_trained_agent0')
line2, = plt.plot(win_rates_against_random1_eta0[2][1], 'r', label='ttt_nfsp_0.2_random_agent1')
line3, = plt.plot(win_rates_against_random1_eta0[2][2], 'g', label='ttt_nfsp_0.2_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(312)
ax2.set_title('random agent0 against trained agent1(eta 0 in evaluation)')
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_random0_eta0[2][0], 'r', label='ttt_nfsp_0.2_random_agent0')
    line2, = plt.plot(win_rates_against_random0_eta0[2][1], 'b', label='ttt_nfsp_0.2_trained_agent1')
line3, = plt.plot(win_rates_against_random0_eta0[2][2], 'g', label='ttt_nfsp_0.2_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(313)
ax2.set_title("win rates against each other(eta 0 for both in evaluation)")
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_eachother_eta0[2][0], 'b', label='ttt_nfsp_0.2_agent0')
line2, = plt.plot(win_rates_against_eachother_eta0[2][1], 'r', label='ttt_nfsp_0.2_agent1')
line3, = plt.plot(win_rates_against_eachother_eta0[2][2], 'g', label='ttt_nfsp_0.2_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
plt.show()
# plot random agent0 against trained agent1 in ttt_nfsp_0.1_7_26
# plt win rates of ttt_nfsp_1_7_29, trained agent0 against random agent1
plt.figure(figsize=(15, 12))
ax2 = plt.subplot(311)
ax2.set_title("trained agent0 against random agent1(eta 0 in evaluation)")
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_random1_eta1[0][0], 'b', label='ttt_nfsp_1_trained_agent0')
line2, = plt.plot(win_rates_against_random1_eta1[0][1], 'r', label='ttt_nfsp_1_random_agent1')
line3, = plt.plot(win_rates_against_random1_eta1[0][2], 'g', label='ttt_nfsp_1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(312)
ax2.set_title('random agent0 against trained agent1(eta 0 in evaluation)')
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_random0_eta1[0][0], 'r', label='ttt_nfsp_1_random_agent0')
    line2, = plt.plot(win_rates_against_random0_eta1[0][1], 'b', label='ttt_nfsp_1_trained_agent1')
line3, = plt.plot(win_rates_against_random0_eta1[0][2], 'g', label='ttt_nfsp_1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(313)
ax2.set_title("win rates against each other(eta 0 for both in evaluation)")
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_eachother_eta1[0][0], 'b', label='ttt_nfsp_1_agent0')
line2, = plt.plot(win_rates_against_eachother_eta1[0][1], 'r', label='ttt_nfsp_1_agent1')
line3, = plt.plot(win_rates_against_eachother_eta1[0][2], 'g', label='ttt_nfsp_1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
plt.show()
if __name__ == "__main__":
app.run(main)<file_sep>from absl import app
from absl import flags
import numpy as np
from matplotlib.legend_handler import HandlerLine2D
import matplotlib.pyplot as plt
def read_wr(txt_name):
text_file = open(txt_name, "r")
lines = text_file.read().split("\n")
list1 = []
list2 = []
list3 = []
for line in lines[:-1]:
[str1, str2, str3] = line.split(" ")
list1.append(float(str1))
list2.append(float(str2))
list3.append(float(str3))
return list1, list2, list3
def read_exploitability(txt_name):
txt_file = open(txt_name, 'r')
lines = txt_file.read().split('\n')
num_list = []
for str in lines[:-1]:
if str == "NaN":
num_list.append(1)
else:
num_list.append(float(str))
return num_list
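# read_loss parses two space-separated values per line (used below both for
# supervised/reinforcement learning losses and for agent0/agent1 average
# utilities); the placeholder string "None" is kept on the first pass and then
# replaced with the next entry's value.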
def read_loss(txt_name):
txt_file = open(txt_name)
lines = txt_file.read().split('\n')
list1 = []
list2 = []
for line in lines[:-1]:
[str1, str2] = line.split(' ')
if str1 != 'None':
list1.append(float(str1))
else:
list1.append(str1)
if str2 != 'None':
list2.append(float(str2))
else:
list2.append(str2)
for idx, number in enumerate(list1):
if number == 'None':
list1[idx] = list1[idx+1]
for idx, number in enumerate(list2):
if number == 'None':
list2[idx] = list2[idx+1]
return list1, list2
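# read_behavior_probs parses eight space-separated floats per line and returns
# the eight columns separately; each file holds the recorded behavior
# probabilities for one card deal (JK.txt, JQ.txt, ... loaded in main below).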
def read_behavior_probs(txt_name):
text_file = open(txt_name, "r")
lines = text_file.read().split("\n")
list1 = []
list2 = []
list3 = []
list4 = []
list5 = []
list6 = []
list7 = []
list8 = []
for line in lines[:-1]:
[str1, str2, str3, str4, str5, str6, str7, str8] = line.split(" ")
list1.append(float(str1))
list2.append(float(str2))
list3.append(float(str3))
list4.append(float(str4))
list5.append(float(str5))
list6.append(float(str6))
list7.append(float(str7))
list8.append(float(str8))
return list1, list2, list3, list4, list5, list6, list7, list8
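# In these NFSP runs eta is the anticipatory parameter: eta 0 means an agent
# plays its average policy only, eta 1 its best response policy only, and
# eta 0.1 plays the best response with probability 0.1 and the average policy
# otherwise. Directory names such as eta_0_1 / eta_1_0 encode per-agent
# choices (agent0's eta, then agent1's eta) used during evaluation.
#
# main() repeats the same three-line subplot pattern many times. The helper
# below is a minimal, unused sketch of how that pattern could be factored out;
# the name plot_win_rate_subplot and its arguments are illustrative only.
def plot_win_rate_subplot(position, title, series, labels, colors=('b', 'r', 'g')):
  ax = plt.subplot(position)
  ax.set_title(title)
  plt.ylim(0, 1)
  lines = [plt.plot(data, color, label=label)[0]
           for data, color, label in zip(series, colors, labels)]
  plt.legend(handles=lines, loc='lower right', prop={'size': 9})
  plt.ylabel('win_rate')
  plt.xlabel('episode(*1e4)')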
def main(argv):
kuhn_poker_nfsp_0 = "/home/jxu8/Code_update/open_spiel/evaluation_data/eval_kp_nfsp_0.1_7_27/"
kuhn_poker_nfsp_1 = "/home/jxu8/Code_update/open_spiel/evaluation_data/eval_kp_nfsp_1_7_28/"
ttt_nfsp_0 = "/home/jxu8/Code_update/open_spiel/evaluation_data/eval_ttt_nfsp_0.1_7_26/"
ttt_nfsp_1 = "/home/jxu8/Code_update/open_spiel/evaluation_data/eval_ttt_nfsp_1_7_29/"
kuhn_poker_psro = "/home/jxu8/Code/open_spiel/evaluation_data/eval_kuhn_poker_psro_7_2/"
# exploitabilities_kuhn_poker_psro = read_eval(kuhn_poker_psro + 'exploitabilities.txt')
# array = np.array(exploitabilities_kuhn_poker_psro)
# print(array[:,0])
expl_0 = []
expl_1 = []
loss_agent0 = []
loss_agent1 = []
win_rates_against_random1_eta0 = [] #load win rates against random agent1, trained with eta 0.1
win_rates_against_random0_eta0 = [] #load win rates against random agent0, trained with eta 0.1
win_rates_against_random1_eta1 = [] #load win rates against random agent1, trained with eta 1
win_rates_against_random0_eta1 = [] #load win rates against random agent0, trained with eta 1
win_rates_against_eachother_eta0 = [] #load win rates against each other, trained with eta 0.1
win_rates_against_eachother_eta1 = [] #load win rates against each other, trained with eta 1
bp_jk = []
bp_jq = []
bp_kj = []
bp_kq = []
bp_qj = []
bp_qk = []
avg_rewards_0_against_eachother = []
avg_rewards_1_against_eachother = []
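# Behavior probabilities are stored per card deal (JK, JQ, KJ, KQ, QJ, QK --
# presumably player0's card followed by player1's card), once for each
# (training eta, evaluation eta) combination loaded below.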
bp_jk.append(read_behavior_probs(kuhn_poker_nfsp_0 + 'behavior_probs/eta_0.1/policy_based/JK.txt'))
bp_jq.append(read_behavior_probs(kuhn_poker_nfsp_0 + 'behavior_probs/eta_0.1/policy_based/JQ.txt'))
bp_kj.append(read_behavior_probs(kuhn_poker_nfsp_0 + 'behavior_probs/eta_0.1/policy_based/KJ.txt'))
bp_kq.append(read_behavior_probs(kuhn_poker_nfsp_0 + 'behavior_probs/eta_0.1/policy_based/KQ.txt'))
bp_qj.append(read_behavior_probs(kuhn_poker_nfsp_0 + 'behavior_probs/eta_0.1/policy_based/QJ.txt'))
bp_qk.append(read_behavior_probs(kuhn_poker_nfsp_0 + 'behavior_probs/eta_0.1/policy_based/QK.txt'))
bp_jk.append(read_behavior_probs(kuhn_poker_nfsp_0 + 'behavior_probs/eta_1/policy_based/JK.txt'))
bp_jq.append(read_behavior_probs(kuhn_poker_nfsp_0 + 'behavior_probs/eta_1/policy_based/JQ.txt'))
bp_kj.append(read_behavior_probs(kuhn_poker_nfsp_0 + 'behavior_probs/eta_1/policy_based/KJ.txt'))
bp_kq.append(read_behavior_probs(kuhn_poker_nfsp_0 + 'behavior_probs/eta_1/policy_based/KQ.txt'))
bp_qj.append(read_behavior_probs(kuhn_poker_nfsp_0 + 'behavior_probs/eta_1/policy_based/QJ.txt'))
bp_qk.append(read_behavior_probs(kuhn_poker_nfsp_0 + 'behavior_probs/eta_1/policy_based/QK.txt'))
bp_jk.append(read_behavior_probs(kuhn_poker_nfsp_1 + 'behavior_probs/eta_0.1/policy_based/JK.txt'))
bp_jq.append(read_behavior_probs(kuhn_poker_nfsp_1 + 'behavior_probs/eta_0.1/policy_based/JQ.txt'))
bp_kj.append(read_behavior_probs(kuhn_poker_nfsp_1 + 'behavior_probs/eta_0.1/policy_based/KJ.txt'))
bp_kq.append(read_behavior_probs(kuhn_poker_nfsp_1 + 'behavior_probs/eta_0.1/policy_based/KQ.txt'))
bp_qj.append(read_behavior_probs(kuhn_poker_nfsp_1 + 'behavior_probs/eta_0.1/policy_based/QJ.txt'))
bp_qk.append(read_behavior_probs(kuhn_poker_nfsp_1 + 'behavior_probs/eta_0.1/policy_based/QK.txt'))
bp_jk.append(read_behavior_probs(kuhn_poker_nfsp_1 + 'behavior_probs/eta_1/policy_based/JK.txt'))
bp_jq.append(read_behavior_probs(kuhn_poker_nfsp_1 + 'behavior_probs/eta_1/policy_based/JQ.txt'))
bp_kj.append(read_behavior_probs(kuhn_poker_nfsp_1 + 'behavior_probs/eta_1/policy_based/KJ.txt'))
bp_kq.append(read_behavior_probs(kuhn_poker_nfsp_1 + 'behavior_probs/eta_1/policy_based/KQ.txt'))
bp_qj.append(read_behavior_probs(kuhn_poker_nfsp_1 + 'behavior_probs/eta_1/policy_based/QJ.txt'))
bp_qk.append(read_behavior_probs(kuhn_poker_nfsp_1 + 'behavior_probs/eta_1/policy_based/QK.txt'))
bp_jk.append(read_behavior_probs(kuhn_poker_nfsp_0 + 'behavior_probs/eta_0/policy_based/JK.txt'))
bp_jq.append(read_behavior_probs(kuhn_poker_nfsp_0 + 'behavior_probs/eta_0/policy_based/JQ.txt'))
bp_kj.append(read_behavior_probs(kuhn_poker_nfsp_0 + 'behavior_probs/eta_0/policy_based/KJ.txt'))
bp_kq.append(read_behavior_probs(kuhn_poker_nfsp_0 + 'behavior_probs/eta_0/policy_based/KQ.txt'))
bp_qj.append(read_behavior_probs(kuhn_poker_nfsp_0 + 'behavior_probs/eta_0/policy_based/QJ.txt'))
bp_qk.append(read_behavior_probs(kuhn_poker_nfsp_0 + 'behavior_probs/eta_0/policy_based/QK.txt'))
bp_jk.append(read_behavior_probs(kuhn_poker_nfsp_1 + 'behavior_probs/eta_0/policy_based/JK.txt'))
bp_jq.append(read_behavior_probs(kuhn_poker_nfsp_1 + 'behavior_probs/eta_0/policy_based/JQ.txt'))
bp_kj.append(read_behavior_probs(kuhn_poker_nfsp_1 + 'behavior_probs/eta_0/policy_based/KJ.txt'))
bp_kq.append(read_behavior_probs(kuhn_poker_nfsp_1 + 'behavior_probs/eta_0/policy_based/KQ.txt'))
bp_qj.append(read_behavior_probs(kuhn_poker_nfsp_1 + 'behavior_probs/eta_0/policy_based/QJ.txt'))
bp_qk.append(read_behavior_probs(kuhn_poker_nfsp_1 + 'behavior_probs/eta_0/policy_based/QK.txt'))
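# Index layout of bp_jk/bp_jq/...: [0] nfsp_0.1 evaluated with eta 0.1,
# [1] nfsp_0.1 with eta 1, [2] nfsp_1 with eta 0.1, [3] nfsp_1 with eta 1,
# [4] nfsp_0.1 with eta 0, [5] nfsp_1 with eta 0.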
tmp_list = [bp_jk[4], bp_jq[4], bp_kj[4], bp_kq[4], bp_qj[4], bp_qk[4]]
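# The alpha_* lists below appear to back out candidate values of the Kuhn
# poker equilibrium parameter (usually called alpha) from the eta-0 behavior
# probabilities of the nfsp_0.1 run; the mapping between the eight recorded
# columns and the underlying information states is not documented here, so the
# exact formulas are taken as given.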
alpha_1 = [1 - tmp_list[0][0][i] for i in range(len(tmp_list[0][0]))]
alpha_2 = [1 - tmp_list[1][0][i] for i in range(len(tmp_list[1][0]))]
alpha_3 = [(1/3) * (1 - tmp_list[2][0][i]) for i in range(len(tmp_list[2][0]))]
alpha_4 = [(1/3) * (1 - tmp_list[3][0][i]) for i in range(len(tmp_list[3][0]))]
alpha_5 = [2/3 - tmp_list[4][6][i] for i in range(len(tmp_list[4][6]))]
alpha_6 = [2/3 - tmp_list[5][6][i] for i in range(len(tmp_list[5][6]))]
ax2 = plt.figure(figsize=(10, 10))
#ax2.set_title("JK (kuhn_poker_nfsp_0.1, eta0.1 in evaluation)")
#plt.ylim(0, 0.35)
line1, = plt.plot(alpha_1, "b-", label="1")
line2, = plt.plot(alpha_2, "b*", label="2")
line3, = plt.plot(alpha_3, "g-", label="3")
line4, = plt.plot(alpha_4, "g*", label="4")
line5, = plt.plot(alpha_5, "y-", label="5")
line6, = plt.plot(alpha_6, "y*", label="6")
plt.legend(handles=[line1, line2, line3, line4, line5, line6], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
plt.show()
expl_0.append(read_exploitability('/home/jxu8/Code_update/open_spiel/sessions_nfsp/kuhn_poker_0.1_7_27/' + 'exploitabilities.txt'))
expl_0.append(read_exploitability(ttt_nfsp_0 + 'exploitabilities.txt'))
expl_1.append(read_exploitability('/home/jxu8/Code_update/open_spiel/sessions_nfsp/kuhn_poker_1_7_28/' + 'exploitability.txt'))
expl_1.append(read_exploitability(ttt_nfsp_1 + 'exploitabilities.txt'))
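# expl_0 / expl_1 index layout: [0] Kuhn poker, [1] tic-tac-toe
# (training eta 0.1 and eta 1 respectively).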
loss_agent0.append(read_loss('/home/jxu8/Code_update/open_spiel/sessions_nfsp/kuhn_poker_0.1_7_27/loss_agent0.txt'))
loss_agent1.append(read_loss('/home/jxu8/Code_update/open_spiel/sessions_nfsp/kuhn_poker_0.1_7_27/loss_agent1.txt'))
loss_agent0.append(read_loss('/home/jxu8/Code_update/open_spiel/sessions_nfsp/kuhn_poker_1_7_28/loss_agent0.txt'))
loss_agent1.append(read_loss('/home/jxu8/Code_update/open_spiel/sessions_nfsp/kuhn_poker_1_7_28/loss_agent1.txt'))
loss_agent0.append(read_loss('/home/jxu8/Code_update/open_spiel/sessions_nfsp/tic_tac_toe_0.1_7_26/loss_agent0.txt'))
loss_agent1.append(read_loss('/home/jxu8/Code_update/open_spiel/sessions_nfsp/tic_tac_toe_0.1_7_26/loss_agent1.txt'))
loss_agent0.append(read_loss('/home/jxu8/Code_update/open_spiel/sessions_nfsp/tic_tac_toe_1_7_29/loss_agent0.txt'))
loss_agent1.append(read_loss('/home/jxu8/Code_update/open_spiel/sessions_nfsp/tic_tac_toe_1_7_29/loss_agent1.txt'))
# load win_rates with eta0.1 in training
win_rates_against_random1_eta0.append(read_wr(ttt_nfsp_0 + 'win_rates/eta_0/win_rates_against_random_agent1.txt')) # load win rates of trained agent0 against random agent1, with eta 0 in evaluation process (average policy only)
win_rates_against_random0_eta0.append(read_wr(ttt_nfsp_0 + 'win_rates/eta_0/win_rates_against_random_agent0.txt')) # load win rates of trained agent1 against random agent0, with eta 0 in evaluation process (average policy only)
win_rates_against_random1_eta0.append(read_wr(ttt_nfsp_0 + 'win_rates/eta_0.1/win_rates_against_random_agent1.txt')) # load win rates of trained agent0 against random agent1, with eta 0.1 in evaluation process
win_rates_against_random0_eta0.append(read_wr(ttt_nfsp_0 + 'win_rates/eta_0.1/win_rates_against_random_agent0.txt')) # load win rates of trained agent1 against random agent0, with eta 0.1 in evaluation process
win_rates_against_random1_eta0.append(read_wr(ttt_nfsp_0 + 'win_rates/eta_1/win_rates_against_random_agent1.txt'))
win_rates_against_random0_eta0.append(read_wr(ttt_nfsp_0 + 'win_rates/eta_1/win_rates_against_random_agent0.txt'))
win_rates_against_random1_eta0.append(read_wr(kuhn_poker_nfsp_0 + 'win_rates/eta_0/win_rates_against_random_agent1.txt')) # load win rates of trained agent0 against random agent1, with eta 0 in evaluation process (average policy only)
win_rates_against_random0_eta0.append(read_wr(kuhn_poker_nfsp_0 + 'win_rates/eta_0/win_rates_against_random_agent0.txt')) # load win rates of trained agent1 against random agent0, with eta 0 in evaluation process (average policy only)
win_rates_against_random1_eta0.append(read_wr(kuhn_poker_nfsp_0 + 'win_rates/eta_0.1/win_rates_against_random_agent1.txt')) # load win rates of trained agent0 against random agent1, with eta 0.1 in evaluation process
win_rates_against_random0_eta0.append(read_wr(kuhn_poker_nfsp_0 + 'win_rates/eta_0.1/win_rates_against_random_agent0.txt')) # load win rates of trained agent1 against random agent0, with eta 0.1 in evaluation process
win_rates_against_random1_eta0.append(read_wr(kuhn_poker_nfsp_0 + 'win_rates/eta_1/win_rates_against_random_agent1.txt'))
win_rates_against_random0_eta0.append(read_wr(kuhn_poker_nfsp_0 + 'win_rates/eta_1/win_rates_against_random_agent0.txt'))
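# Index layout of win_rates_against_random1_eta0 / win_rates_against_random0_eta0:
# [0]-[2] tic-tac-toe with evaluation eta 0 / 0.1 / 1,
# [3]-[5] Kuhn poker with evaluation eta 0 / 0.1 / 1.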
win_rates_against_eachother_eta0.append(read_wr(ttt_nfsp_0 + 'win_rates/eta_0/win_rates_against_eachother.txt')) # load win rates of trained agent0 against trained agent1, with eta 0 for both in the evaluation process (both use average policy only)
win_rates_against_eachother_eta0.append(read_wr(ttt_nfsp_0 + 'win_rates/eta_1/win_rates_against_eachother.txt')) # load win rates of trained agent0 against trained agent1, with eta 1 for trained agent0 and eta 1 for trained agent1
# (both trained agents use best response policy only)
win_rates_against_eachother_eta0.append(read_wr(ttt_nfsp_0 + 'win_rates/eta_0.1/win_rates_against_eachother.txt')) # load win rates of trained agent0 against trained agent1, with eta 0.1 for both agents
# (both agents mix best response and average policy with eta 0.1)
win_rates_against_eachother_eta0.append(read_wr(ttt_nfsp_0 + 'win_rates/eta_0_1/win_rates_against_eachother.txt')) # load win rates of trained agent0 against trained agent1, with eta 0 for trained agent0 and eta 1 for trained agent1
# (trained agent0 uses average policy only and trained agent1 uses best response policy only)
win_rates_against_eachother_eta0.append(read_wr(ttt_nfsp_0 + 'win_rates/eta_1_0/win_rates_against_eachother.txt')) # load win rates of trained agent0 against trained agent1, with eta 1 for trained agent0 and eta 0 for trained agent1
# (trained agent0 uses best response policy only and trained agent1 uses average policy only)
win_rates_against_eachother_eta0.append(read_wr(kuhn_poker_nfsp_0 + 'win_rates/eta_0/win_rates_against_eachother.txt')) # load win rates of trained agent0 against trained agent1, with eta 0 for both in the evaluation process (both use average policy only)
win_rates_against_eachother_eta0.append(read_wr(kuhn_poker_nfsp_0 + 'win_rates/eta_1/win_rates_against_eachother.txt')) # load win rates of trained agent0 against trained agent1, with eta 1 for trained agent0 and eta 1 for trained agent1
# (both trained agents use best response policy only)
win_rates_against_eachother_eta0.append(read_wr(kuhn_poker_nfsp_0 + 'win_rates/eta_0.1/win_rates_against_eachother.txt')) # load win rates of trained agent0 against trained agent1, with eta 0.1 for both agents
# (both agents mix best response and average policy with eta 0.1)
win_rates_against_eachother_eta0.append(read_wr(kuhn_poker_nfsp_0 + 'win_rates/eta_0_1/win_rates_against_eachother.txt')) # load win rates of trained agent0 against trained agent1, with eta 0 for trained agent0 and eta 1 for trained agent1
# (trained agent0 uses average policy only and trained agent1 uses best response policy only)
win_rates_against_eachother_eta0.append(read_wr(kuhn_poker_nfsp_0 + 'win_rates/eta_1_0/win_rates_against_eachother.txt')) # load win rates of trained agent0 against trained agent1, with eta 1 for trained agent0 and eta 0 for trained agent1
# (trained agent0 uses best response policy only and trained agent1 uses average policy only)
#load win rates with eta1 in training
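# The eta-1 training lists below mirror the layout of the eta-0.1 lists above:
# against-random lists are [0]-[2] tic-tac-toe and [3]-[5] Kuhn poker
# (evaluation eta 0 / 0.1 / 1); against-eachother lists are [0]-[4] tic-tac-toe
# and [5]-[9] Kuhn poker (evaluation eta 0 / 1 / 0.1 / 0_1 / 1_0).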
win_rates_against_random1_eta1.append(read_wr(ttt_nfsp_1 + 'win_rates/eta_0/win_rates_against_random_agent1.txt')) # load win rates of trained agent0 against random agent1, with eta 0 in evaluation process (average policy only)
win_rates_against_random0_eta1.append(read_wr(ttt_nfsp_1 + 'win_rates/eta_0/win_rates_against_random_agent0.txt')) # load win rates of trained agent1 against random agent0, with eta 0 in evaluation process (average policy only)
win_rates_against_random1_eta1.append(read_wr(ttt_nfsp_1 + 'win_rates/eta_0.1/win_rates_against_random_agent1.txt')) # load win rates of trained agent0 against random agent1, with eta 0.1 in evaluation process
win_rates_against_random0_eta1.append(read_wr(ttt_nfsp_1 + 'win_rates/eta_0.1/win_rates_against_random_agent0.txt')) # load win rates of trained agent1 against random agent0, with eta 0.1 in evaluation process
win_rates_against_random1_eta1.append(read_wr(ttt_nfsp_1 + 'win_rates/eta_1/win_rates_against_random_agent1.txt'))
win_rates_against_random0_eta1.append(read_wr(ttt_nfsp_1 + 'win_rates/eta_1/win_rates_against_random_agent0.txt'))
win_rates_against_random1_eta1.append(read_wr(kuhn_poker_nfsp_1 + 'win_rates/eta_0/win_rates_against_random_agent1.txt')) # load win rates of trained agent0 against random agent1, with eta 0 in evaluation process (average policy only)
win_rates_against_random0_eta1.append(read_wr(kuhn_poker_nfsp_1 + 'win_rates/eta_0/win_rates_against_random_agent0.txt')) # load win rates of trained agent1 against random agent0, with eta 0 in evaluation process (average policy only)
win_rates_against_random1_eta1.append(read_wr(kuhn_poker_nfsp_1 + 'win_rates/eta_0.1/win_rates_against_random_agent1.txt')) # load win rates of trained agent0 against random agent1, with eta 0.1 in evaluation process
win_rates_against_random0_eta1.append(read_wr(kuhn_poker_nfsp_1 + 'win_rates/eta_0.1/win_rates_against_random_agent0.txt')) # load win rates of trained agent1 against random agent0, with eta 0.1 in evaluation process
win_rates_against_random1_eta1.append(read_wr(kuhn_poker_nfsp_1 + 'win_rates/eta_1/win_rates_against_random_agent1.txt'))
win_rates_against_random0_eta1.append(read_wr(kuhn_poker_nfsp_1 + 'win_rates/eta_1/win_rates_against_random_agent0.txt'))
win_rates_against_eachother_eta1.append(read_wr(ttt_nfsp_1 + 'win_rates/eta_0/win_rates_against_eachother.txt')) # load win rates of trained agent0 against trained agent1, with eta 0 for both in the evaluation process (both use average policy only)
win_rates_against_eachother_eta1.append(read_wr(ttt_nfsp_1 + 'win_rates/eta_1/win_rates_against_eachother.txt')) # load win rates of trained agent0 against trained agent1, with eta 1 for trained agent0 and eta 1 for trained agent1
# (both trained agents use best response policy only)
win_rates_against_eachother_eta1.append(read_wr(ttt_nfsp_1 + 'win_rates/eta_0.1/win_rates_against_eachother.txt')) # load win rates of trained agent0 against trained agent1, with eta 0.1 for both agents
# (both agents mix best response and average policy with eta 0.1)
win_rates_against_eachother_eta1.append(read_wr(ttt_nfsp_1 + 'win_rates/eta_0_1/win_rates_against_eachother.txt')) # load win rates of trained agent0 against trained agent1, with eta 0 for trained agent0 and eta 1 for trained agent1
# (trained agent0 uses average policy only and trained agent1 uses best response policy only)
win_rates_against_eachother_eta1.append(read_wr(ttt_nfsp_1 + 'win_rates/eta_1_0/win_rates_against_eachother.txt')) # load win rates of trained agent0 against trained agent1, with eta 1 for trained agent0 and eta 0 for trained agent1
# (trained agent0 uses best response policy only and trained agent1 uses average policy only)
win_rates_against_eachother_eta1.append(read_wr(kuhn_poker_nfsp_1 + 'win_rates/eta_0/win_rates_against_eachother.txt')) # load win rates of trained agent0 against trained agent1, with eta 0 for both in the evaluation process (both use average policy only)
win_rates_against_eachother_eta1.append(read_wr(kuhn_poker_nfsp_1 + 'win_rates/eta_1/win_rates_against_eachother.txt')) # load win rates of trained agent0 against trained agent1, with eta 1 for trained agent0 and eta 1 for trained agent1
# (both trained agents use best response policy only)
win_rates_against_eachother_eta1.append(read_wr(kuhn_poker_nfsp_1 + 'win_rates/eta_0.1/win_rates_against_eachother.txt')) # load win rates of trained agent0 against trained agent1, with eta 0.1 for both agents
# (both agents mix best response and average policy with eta 0.1)
win_rates_against_eachother_eta1.append(read_wr(kuhn_poker_nfsp_1 + 'win_rates/eta_0_1/win_rates_against_eachother.txt')) # load win rates of trained agent0 against trained agent1, with eta 0 for trained agent0 and eta 1 for trained agent1
# (trained agent0 uses average policy only and trained agent1 uses best response policy only)
win_rates_against_eachother_eta1.append(read_wr(kuhn_poker_nfsp_1 + 'win_rates/eta_1_0/win_rates_against_eachother.txt')) # load win rates of trained agent0 against trained agent1, with eta 1 for trained agent0 and eta 0 for trained agent1
# (trained agent0 uses best response policy only and trained agent1 uses average policy only)
# load avg_utility against eachother with eta 0 in training
avg_rewards_0_against_eachother.append(read_loss(kuhn_poker_nfsp_0 + 'avg_utility/eta_0/avg_utility_against_eachother.txt'))
avg_rewards_0_against_eachother.append(read_loss(kuhn_poker_nfsp_0 + 'avg_utility/eta_1/avg_utility_against_eachother.txt'))
avg_rewards_0_against_eachother.append(read_loss(kuhn_poker_nfsp_0 + 'avg_utility/eta_0.1/avg_utility_against_eachother.txt'))
avg_rewards_0_against_eachother.append(read_loss(kuhn_poker_nfsp_0 + 'avg_utility/eta_0_1/avg_utility_against_eachother.txt'))
avg_rewards_0_against_eachother.append(read_loss(kuhn_poker_nfsp_0 + 'avg_utility/eta_1_0/avg_utility_against_eachother.txt'))
avg_rewards_0_against_eachother.append(read_loss(ttt_nfsp_0 + 'avg_utility/eta_0/avg_utility_against_eachother.txt'))
avg_rewards_0_against_eachother.append(read_loss(ttt_nfsp_0 + 'avg_utility/eta_1/avg_utility_against_eachother.txt'))
avg_rewards_0_against_eachother.append(read_loss(ttt_nfsp_0 + 'avg_utility/eta_0.1/avg_utility_against_eachother.txt'))
avg_rewards_0_against_eachother.append(read_loss(ttt_nfsp_0 + 'avg_utility/eta_0_1/avg_utility_against_eachother.txt'))
avg_rewards_0_against_eachother.append(read_loss(ttt_nfsp_0 + 'avg_utility/eta_1_0/avg_utility_against_eachother.txt'))
# load avg_utility against eachother with eta 1 in training
avg_rewards_1_against_eachother.append(read_loss(kuhn_poker_nfsp_1 + 'avg_utility/eta_0/avg_utility_against_eachother.txt'))
avg_rewards_1_against_eachother.append(read_loss(kuhn_poker_nfsp_1 + 'avg_utility/eta_1/avg_utility_against_eachother.txt'))
avg_rewards_1_against_eachother.append(read_loss(kuhn_poker_nfsp_1 + 'avg_utility/eta_0.1/avg_utility_against_eachother.txt'))
avg_rewards_1_against_eachother.append(read_loss(kuhn_poker_nfsp_1 + 'avg_utility/eta_0_1/avg_utility_against_eachother.txt'))
avg_rewards_1_against_eachother.append(read_loss(kuhn_poker_nfsp_1 + 'avg_utility/eta_1_0/avg_utility_against_eachother.txt'))
avg_rewards_1_against_eachother.append(read_loss(ttt_nfsp_1 + 'avg_utility/eta_0/avg_utility_against_eachother.txt'))
avg_rewards_1_against_eachother.append(read_loss(ttt_nfsp_1 + 'avg_utility/eta_1/avg_utility_against_eachother.txt'))
avg_rewards_1_against_eachother.append(read_loss(ttt_nfsp_1 + 'avg_utility/eta_0.1/avg_utility_against_eachother.txt'))
avg_rewards_1_against_eachother.append(read_loss(ttt_nfsp_1 + 'avg_utility/eta_0_1/avg_utility_against_eachother.txt'))
avg_rewards_1_against_eachother.append(read_loss(ttt_nfsp_1 + 'avg_utility/eta_1_0/avg_utility_against_eachother.txt'))
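# avg_rewards_0_against_eachother / avg_rewards_1_against_eachother layout:
# [0]-[4] Kuhn poker, [5]-[9] tic-tac-toe, each ordered by evaluation
# eta 0 / 1 / 0.1 / 0_1 / 1_0.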
plt.figure(figsize=(10, 10))
ax2 = plt.subplot(311)
ax2.set_title("JK (kuhn_poker_nfsp_0.1, eta0.1 in evaluation)")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_jk[0][0], "b", label="1")
line2, = plt.plot(bp_jk[0][2], "r", label="2")
line3, = plt.plot(bp_jk[0][4], "g", label="3")
line4, = plt.plot(bp_jk[0][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(312)
ax2.set_title("JQ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_jq[0][0], "b", label="1")
line2, = plt.plot(bp_jq[0][2], "r", label="2")
line3, = plt.plot(bp_jq[0][4], "g", label="3")
line4, = plt.plot(bp_jq[0][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(313)
ax2.set_title("KJ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_kj[0][0], "b", label="1")
line2, = plt.plot(bp_kj[0][2], "r", label="2")
line3, = plt.plot(bp_kj[0][4], "g", label="3")
line4, = plt.plot(bp_kj[0][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
plt.show()
plt.figure(figsize=(10, 10))
ax2 = plt.subplot(311)
ax2.set_title("KQ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_kq[0][0], "b", label="1")
line2, = plt.plot(bp_kq[0][2], "r", label="2")
line3, = plt.plot(bp_kq[0][4], "g", label="3")
line4, = plt.plot(bp_kq[0][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(312)
ax2.set_title("QJ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_qj[0][0], "b", label="1")
line2, = plt.plot(bp_qj[0][2], "r", label="2")
line3, = plt.plot(bp_qj[0][4], "g", label="3")
line4, = plt.plot(bp_qj[0][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(313)
ax2.set_title("QK")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_qk[0][0], "b", label="1")
line2, = plt.plot(bp_qk[0][2], "r", label="2")
line3, = plt.plot(bp_qk[0][4], "g", label="3")
line4, = plt.plot(bp_qk[0][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
plt.show()
# plt bp for kuhn_poker_nfsp_0.1, eta1 in evaluation
plt.figure(figsize=(10, 10))
ax2 = plt.subplot(311)
ax2.set_title("JK (kuhn_poker_nfsp_0.1, eta1 in evaluation)")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_jk[1][0], "b", label="1")
line2, = plt.plot(bp_jk[1][2], "r", label="2")
line3, = plt.plot(bp_jk[1][4], "g", label="3")
line4, = plt.plot(bp_jk[1][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(312)
ax2.set_title("JQ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_jq[1][0], "b", label="1")
line2, = plt.plot(bp_jq[1][2], "r", label="2")
line3, = plt.plot(bp_jq[1][4], "g", label="3")
line4, = plt.plot(bp_jq[1][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(313)
ax2.set_title("KJ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_kj[1][0], "b", label="1")
line2, = plt.plot(bp_kj[1][2], "r", label="2")
line3, = plt.plot(bp_kj[1][4], "g", label="3")
line4, = plt.plot(bp_kj[1][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
plt.show()
plt.figure(figsize=(10, 10))
ax2 = plt.subplot(311)
ax2.set_title("KQ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_kq[1][0], "b", label="1")
line2, = plt.plot(bp_kq[1][2], "r", label="2")
line3, = plt.plot(bp_kq[1][4], "g", label="3")
line4, = plt.plot(bp_kq[1][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(312)
ax2.set_title("QJ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_qj[1][0], "b", label="1")
line2, = plt.plot(bp_qj[1][2], "r", label="2")
line3, = plt.plot(bp_qj[1][4], "g", label="3")
line4, = plt.plot(bp_qj[1][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(313)
ax2.set_title("QK")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_qk[1][0], "b", label="1")
line2, = plt.plot(bp_qk[1][2], "r", label="2")
line3, = plt.plot(bp_qk[1][4], "g", label="3")
line4, = plt.plot(bp_qk[1][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
plt.show()
plt.figure(figsize=(10, 10))
ax2 = plt.subplot(311)
ax2.set_title("JK (kuhn_poker_nfsp_0.1, eta0 in evaluation)")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_jk[4][0], "b", label="1")
line2, = plt.plot(bp_jk[4][2], "r", label="2")
line3, = plt.plot(bp_jk[4][4], "g", label="3")
line4, = plt.plot(bp_jk[4][6], "y", label="4")
plt.axhline(y=2/3,ls=":",c="blue")
plt.axhline(y=1,ls=":",c="blue")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(312)
ax2.set_title("JQ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_jq[4][0], "b", label="1")
line2, = plt.plot(bp_jq[4][2], "r", label="2")
line3, = plt.plot(bp_jq[4][4], "g", label="3")
line4, = plt.plot(bp_jq[4][6], "y", label="4")
plt.axhline(y=2/3,ls=":",c="blue")
plt.axhline(y=1,ls=":",c="blue")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(313)
ax2.set_title("KJ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_kj[4][0], "b", label="1")
line2, = plt.plot(bp_kj[4][2], "r", label="2")
line3, = plt.plot(bp_kj[4][4], "g", label="3")
line4, = plt.plot(bp_kj[4][6], "y", label="4")
plt.axhline(y=0,ls=":",c="blue")
plt.axhline(y=1,ls=":",c="blue")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
plt.show()
plt.figure(figsize=(10, 10))
ax2 = plt.subplot(311)
ax2.set_title("KQ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_kq[4][0], "b", label="1")
line2, = plt.plot(bp_kq[4][2], "r", label="2")
line3, = plt.plot(bp_kq[4][4], "g", label="3")
line4, = plt.plot(bp_kq[4][6], "y", label="4")
plt.axhline(y=0,ls=":",c="blue")
plt.axhline(y=1,ls=":",c="blue")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(312)
ax2.set_title("QJ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_qj[4][0], "b", label="1")
line2, = plt.plot(bp_qj[4][2], "r", label="2")
line3, = plt.plot(bp_qj[4][4], "g", label="3")
line4, = plt.plot(bp_qj[4][6], "y", label="4")
plt.axhline(y=1/3,ls=":",c="yellow")
plt.axhline(y=2/3,ls=":",c="yellow")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(313)
ax2.set_title("QK")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_qk[4][0], "b", label="1")
line2, = plt.plot(bp_qk[4][2], "r", label="2")
line3, = plt.plot(bp_qk[4][4], "g", label="3")
line4, = plt.plot(bp_qk[4][6], "y", label="4")
plt.axhline(y=1/3,ls=":",c="yellow")
plt.axhline(y=2/3,ls=":",c="yellow")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
plt.show()
plt.figure(figsize=(10, 10))
ax2 = plt.subplot(311)
ax2.set_title("JK (kuhn_poker_nfsp_1, eta0.1 in evaluation)")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_jk[2][0], "b", label="1")
line2, = plt.plot(bp_jk[2][2], "r", label="2")
line3, = plt.plot(bp_jk[2][4], "g", label="3")
line4, = plt.plot(bp_jk[2][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(312)
ax2.set_title("JQ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_jq[2][0], "b", label="1")
line2, = plt.plot(bp_jq[2][2], "r", label="2")
line3, = plt.plot(bp_jq[2][4], "g", label="3")
line4, = plt.plot(bp_jq[2][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(313)
ax2.set_title("KJ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_kj[2][0], "b", label="1")
line2, = plt.plot(bp_kj[2][2], "r", label="2")
line3, = plt.plot(bp_kj[2][4], "g", label="3")
line4, = plt.plot(bp_kj[2][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
plt.show()
plt.figure(figsize=(10, 10))
ax2 = plt.subplot(311)
ax2.set_title("KQ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_kq[2][0], "b", label="1")
line2, = plt.plot(bp_kq[2][2], "r", label="2")
line3, = plt.plot(bp_kq[2][4], "g", label="3")
line4, = plt.plot(bp_kq[2][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(312)
ax2.set_title("QJ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_qj[2][0], "b", label="1")
line2, = plt.plot(bp_qj[2][2], "r", label="2")
line3, = plt.plot(bp_qj[2][4], "g", label="3")
line4, = plt.plot(bp_qj[2][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(313)
ax2.set_title("QK")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_qk[2][0], "b", label="1")
line2, = plt.plot(bp_qk[2][2], "r", label="2")
line3, = plt.plot(bp_qk[2][4], "g", label="3")
line4, = plt.plot(bp_qk[2][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
plt.show()
plt.figure(figsize=(10, 10))
ax2 = plt.subplot(311)
ax2.set_title("JK (kuhn_poker_nfsp_1, eta1 in evaluation)")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_jk[3][0], "b", label="1")
line2, = plt.plot(bp_jk[3][2], "r", label="2")
line3, = plt.plot(bp_jk[3][4], "g", label="3")
line4, = plt.plot(bp_jk[3][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(312)
ax2.set_title("JQ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_jq[3][0], "b", label="1")
line2, = plt.plot(bp_jq[3][2], "r", label="2")
line3, = plt.plot(bp_jq[3][4], "g", label="3")
line4, = plt.plot(bp_jq[3][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(313)
ax2.set_title("KJ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_kj[3][0], "b", label="1")
line2, = plt.plot(bp_kj[3][2], "r", label="2")
line3, = plt.plot(bp_kj[3][4], "g", label="3")
line4, = plt.plot(bp_kj[3][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
plt.show()
plt.figure(figsize=(10, 10))
ax2 = plt.subplot(311)
ax2.set_title("KQ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_kq[3][0], "b", label="1")
line2, = plt.plot(bp_kq[3][2], "r", label="2")
line3, = plt.plot(bp_kq[3][4], "g", label="3")
line4, = plt.plot(bp_kq[3][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(312)
ax2.set_title("QJ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_qj[3][0], "b", label="1")
line2, = plt.plot(bp_qj[3][2], "r", label="2")
line3, = plt.plot(bp_qj[3][4], "g", label="3")
line4, = plt.plot(bp_qj[3][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(313)
ax2.set_title("QK")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_qk[3][0], "b", label="1")
line2, = plt.plot(bp_qk[3][2], "r", label="2")
line3, = plt.plot(bp_qk[3][4], "g", label="3")
line4, = plt.plot(bp_qk[3][6], "y", label="4")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
plt.show()
plt.figure(figsize=(10, 10))
ax2 = plt.subplot(311)
ax2.set_title("JK (kuhn_poker_nfsp_1, eta0 in evaluation)")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_jk[5][0], "b", label="1")
line2, = plt.plot(bp_jk[5][2], "r", label="2")
line3, = plt.plot(bp_jk[5][4], "g", label="3")
line4, = plt.plot(bp_jk[5][6], "y", label="4")
plt.axhline(y=2/3,ls=":",c="blue")
plt.axhline(y=1,ls=":",c="blue")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(312)
ax2.set_title("JQ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_jq[5][0], "b", label="1")
line2, = plt.plot(bp_jq[5][2], "r", label="2")
line3, = plt.plot(bp_jq[5][4], "g", label="3")
line4, = plt.plot(bp_jq[5][6], "y", label="4")
plt.axhline(y=2/3,ls=":",c="blue")
plt.axhline(y=1,ls=":",c="blue")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(313)
ax2.set_title("KJ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_kj[5][0], "b", label="1")
line2, = plt.plot(bp_kj[5][2], "r", label="2")
line3, = plt.plot(bp_kj[5][4], "g", label="3")
line4, = plt.plot(bp_kj[5][6], "y", label="4")
plt.axhline(y=0,ls=":",c="blue")
plt.axhline(y=1,ls=":",c="blue")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
plt.show()
plt.figure(figsize=(10, 10))
ax2 = plt.subplot(311)
ax2.set_title("KQ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_kq[5][0], "b", label="1")
line2, = plt.plot(bp_kq[5][2], "r", label="2")
line3, = plt.plot(bp_kq[5][4], "g", label="3")
line4, = plt.plot(bp_kq[5][6], "y", label="4")
plt.axhline(y=0,ls=":",c="blue")
plt.axhline(y=1,ls=":",c="blue")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(312)
ax2.set_title("QJ")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_qj[5][0], "b", label="1")
line2, = plt.plot(bp_qj[5][2], "r", label="2")
line3, = plt.plot(bp_qj[5][4], "g", label="3")
line4, = plt.plot(bp_qj[5][6], "y", label="4")
plt.axhline(y=1/3,ls=":",c="yellow")
plt.axhline(y=2/3,ls=":",c="yellow")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(313)
ax2.set_title("QK")
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 1.1, 0.1)
line1, = plt.plot(bp_qk[5][0], "b", label="1")
line2, = plt.plot(bp_qk[5][2], "r", label="2")
line3, = plt.plot(bp_qk[5][4], "g", label="3")
line4, = plt.plot(bp_qk[5][6], "y", label="4")
plt.axhline(y=1/3,ls=":",c="yellow")
plt.axhline(y=2/3,ls=":",c="yellow")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2, line3, line4], loc='upper right')
plt.ylabel('behavior_probs')
plt.xlabel('episode(*1e4)')
plt.show()
'''
# episode = range(10, 3010, 10)
# plot exploitability, avg utility in kuhn_poker_nfsp_0.1_7_27
plt.figure(figsize=(10, 3))
#plt.subplot(311)
#plt.ylim(0, 0.35)
y_ticks = np.arange(0, 0.35, 0.05)
line1, = plt.plot(expl_0[0], "b", label="kuhn_poker_nfsp_0.1")
line2, = plt.plot(expl_1[0], "r", label="kuhn_poker_nfsp_1")
plt.yticks(y_ticks)
plt.legend(handles=[line1, line2], loc='upper right')
plt.ylabel('exploitability')
plt.xlabel('episode(*1e4)')
plt.show()
plt.figure(figsize=(15, 20))
ax2 = plt.subplot(511)
ax2.set_title("average utility with eta 0 in evaluation (0.1 in training)")
line1, = plt.plot(avg_rewards_0_against_eachother[0][0], "b", label="agent0")
line2, = plt.plot(avg_rewards_0_against_eachother[0][1], "r", label="agent1")
plt.legend(handles=[line1, line2], loc='upper right')
plt.ylabel('avg utility')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(512)
ax2.set_title("average utility with eta 1 in evaluation (0.1 in training)")
line1, = plt.plot(avg_rewards_0_against_eachother[1][0], "b", label="agent0")
line2, = plt.plot(avg_rewards_0_against_eachother[1][1], "r", label="agent1")
plt.legend(handles=[line1, line2], loc='upper right')
plt.ylabel('avg utility')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(513)
ax2.set_title("average utility with eta 0.1 in evaluation (0.1 in training)")
line1, = plt.plot(avg_rewards_0_against_eachother[2][0], "b", label="agent0")
line2, = plt.plot(avg_rewards_0_against_eachother[2][1], "r", label="agent1")
plt.legend(handles=[line1, line2], loc='upper right')
plt.ylabel('avg utility')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(514)
ax2.set_title("average utility with eta 0_1 in evaluation (0.1 in training)")
line1, = plt.plot(avg_rewards_0_against_eachother[3][0], "b", label="agent0")
line2, = plt.plot(avg_rewards_0_against_eachother[3][1], "r", label="agent1")
plt.legend(handles=[line1, line2], loc='upper right')
plt.ylabel('avg utility')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(515)
ax2.set_title("average utility with eta 1_0 in evaluation (0.1 in training)")
line1, = plt.plot(avg_rewards_0_against_eachother[4][0], "b", label="agent0")
line2, = plt.plot(avg_rewards_0_against_eachother[4][1], "r", label="agent1")
plt.legend(handles=[line1, line2], loc='upper right')
plt.ylabel('avg utility')
plt.xlabel('episode(*1e4)')
plt.show()
#plt win rates in kuhn_poker_nfsp_0.1_7_27
plt.figure(figsize=(15, 12))
ax2 = plt.subplot(311)
ax2.set_title("trained agent0 against random agent1(eta 0 in evaluation)")
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_random1_eta0[3][0], 'b', label='kp_nfsp_0.1_trained_agent0')
line2, = plt.plot(win_rates_against_random1_eta0[3][1], 'r', label='kp_nfsp_0.1_random_agent1')
line3, = plt.plot(win_rates_against_random1_eta0[3][2], 'g', label='kp_nfsp_0.1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(312)
ax2.set_title('trained agent0 against random agent1(eta 0.1 in evaluation)')
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_random1_eta0[4][0], 'b', label='kp_nfsp_0.1_trained_agent0')
line2, = plt.plot(win_rates_against_random1_eta0[4][1], 'r', label='kp_nfsp_0.1_random_agent1')
line3, = plt.plot(win_rates_against_random1_eta0[4][2], 'g', label='kp_nfsp_0.1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(313)
ax2.set_title('trained agent0 against random agent1(eta 1 in evaluation)')
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_random1_eta0[5][0], 'b', label='kp_nfsp_0.1_trained_agent0')
line2, = plt.plot(win_rates_against_random1_eta0[5][1], 'r', label='kp_nfsp_0.1_random_agent1')
line3, = plt.plot(win_rates_against_random1_eta0[5][2], 'g', label='kp_nfsp_0.1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
plt.show()
# plot random agent0 against trained agent1 in kp_nfsp_0.1_7_27
plt.figure(figsize=(15, 12))
ax2 = plt.subplot(311)
ax2.set_title('random agent0 against trained agent1(eta 0 in evaluation)')
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_random0_eta0[3][0], 'r', label='kp_nfsp_0.1_random_agent0')
line2, = plt.plot(win_rates_against_random0_eta0[3][1], 'b', label='kp_nfsp_0.1_trained_agent1')
line3, = plt.plot(win_rates_against_random0_eta0[3][2], 'g', label='kp_nfsp_0.1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(312)
ax2.set_title('random agent0 against trained agent1(eta 0.1 in evaluation)')
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_random0_eta0[4][0], 'r', label='kp_nfsp_0.1_random_agent0')
line2, = plt.plot(win_rates_against_random0_eta0[4][1], 'b', label='kp_nfsp_0.1_trained_agent1')
line3, = plt.plot(win_rates_against_random0_eta0[4][2], 'g', label='kp_nfsp_0.1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(313)
ax2.set_title('random agent0 against trained agent1(eta 1 in evaluation)')
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_random0_eta0[5][0], 'r', label='kp_nfsp_0.1_random_agent0')
line2, = plt.plot(win_rates_against_random0_eta0[5][1], 'b', label='kp_nfsp_0.1_trained_agent1')
line3, = plt.plot(win_rates_against_random0_eta0[5][2], 'g', label='kp_nfsp_0.1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
plt.show()
# plot win rates of trained agents against each other in kp_nfsp_0.1_7_27
plt.figure(figsize=(15, 20))
# fig.suptitle("win rates against each other")
ax2 = plt.subplot(511)
ax2.set_title("win rates against each other(eta 0 for both in evaluation)")
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_eachother_eta0[5][0], 'b', label='kp_nfsp_0.1_agent0')
line2, = plt.plot(win_rates_against_eachother_eta0[5][1], 'r', label='kp_nfsp_0.1_agent1')
line3, = plt.plot(win_rates_against_eachother_eta0[5][2], 'g', label='kp_nfsp_0.1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(512)
ax2.set_title('win rates against each other(eta 1 for both in evaluation)')
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_eachother_eta0[6][0], 'b', label='kp_nfsp_0.1_agent0')
line2, = plt.plot(win_rates_against_eachother_eta0[6][1], 'r', label='kp_nfsp_0.1_agent1')
line3, = plt.plot(win_rates_against_eachother_eta0[6][2], 'g', label='kp_nfsp_0.1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(513)
ax2.set_title('win rates against each other(eta 0.1 for both in evaluation)')
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_eachother_eta0[7][0], 'b', label='kp_nfsp_0.1_agent0')
line2, = plt.plot(win_rates_against_eachother_eta0[7][1], 'r', label='kp_nfsp_0.1_agent1')
line3, = plt.plot(win_rates_against_eachother_eta0[7][2], 'g', label='kp_nfsp_0.1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(514)
ax2.set_title('win rates against each other(eta 0 for agent0 and eta 1 for agent1 in evaluation)')
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_eachother_eta0[8][0], 'b', label='kp_nfsp_0.1_agent0')
line2, = plt.plot(win_rates_against_eachother_eta0[8][1], 'r', label='kp_nfsp_0.1_agent1')
line3, = plt.plot(win_rates_against_eachother_eta0[8][2], 'g', label='kp_nfsp_0.1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(515)
ax2.set_title('win rates against each other(eta 1 for agent0 and eta 0 for agent1 in evaluation)')
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_eachother_eta0[9][0], 'b', label='kp_nfsp_0.1_agent0')
line2, = plt.plot(win_rates_against_eachother_eta0[9][1], 'r', label='kp_nfsp_0.1_agent1')
line3, = plt.plot(win_rates_against_eachother_eta0[9][2], 'g', label='kp_nfsp_0.1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
plt.show()
# plt avg utility, win rates in kuhn_poker_nfsp_1_7_28
plt.figure(figsize=(15, 20))
ax2 = plt.subplot(511)
ax2.set_title("average utility with eta 0 in evaluation (1 in training)")
line1, = plt.plot(avg_rewards_1_against_eachother[0][0], "b", label="agent0")
line2, = plt.plot(avg_rewards_1_against_eachother[0][1], "r", label="agent1")
plt.legend(handles=[line1, line2], loc='upper right')
plt.ylabel('avg utility')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(512)
ax2.set_title("average utility with eta 1 in evaluation (1 in training)")
line1, = plt.plot(avg_rewards_1_against_eachother[1][0], "b", label="agent0")
line2, = plt.plot(avg_rewards_1_against_eachother[1][1], "r", label="agent1")
plt.legend(handles=[line1, line2], loc='upper right')
plt.ylabel('avg utility')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(513)
ax2.set_title("average utility with eta 0.1 in evaluation (1 in training)")
line1, = plt.plot(avg_rewards_1_against_eachother[2][0], "b", label="agent0")
line2, = plt.plot(avg_rewards_1_against_eachother[2][1], "r", label="agent1")
plt.legend(handles=[line1, line2], loc='upper right')
plt.ylabel('avg utility')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(514)
ax2.set_title("average utility with eta 0_1 in evaluation (1 in training)")
line1, = plt.plot(avg_rewards_1_against_eachother[3][0], "b", label="agent0")
line2, = plt.plot(avg_rewards_1_against_eachother[3][1], "r", label="agent1")
plt.legend(handles=[line1, line2], loc='upper right')
plt.ylabel('avg utility')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(515)
ax2.set_title("average utility with eta 1_0 in evaluation (1 in training)")
line1, = plt.plot(avg_rewards_1_against_eachother[4][0], "b", label="agent0")
line2, = plt.plot(avg_rewards_1_against_eachother[4][1], "r", label="agent1")
plt.legend(handles=[line1, line2], loc='upper right')
plt.ylabel('avg utility')
plt.xlabel('episode(*1e4)')
plt.show()
plt.figure(figsize=(15, 12))
ax2 = plt.subplot(311)
ax2.set_title("trained agent0 against random agent1(eta 0 in evaluation)")
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_random1_eta1[3][0], 'b', label='kp_nfsp_1_trained_agent0')
line2, = plt.plot(win_rates_against_random1_eta1[3][1], 'r', label='kp_nfsp_1_random_agent1')
line3, = plt.plot(win_rates_against_random1_eta1[3][2], 'g', label='kp_nfsp_1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(312)
ax2.set_title('trained agent0 against random agent1(eta 0.1 in evaluation)')
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_random1_eta1[4][0], 'b', label='kp_nfsp_1_trained_agent0')
line2, = plt.plot(win_rates_against_random1_eta1[4][1], 'r', label='kp_nfsp_1_random_agent1')
line3, = plt.plot(win_rates_against_random1_eta1[4][2], 'g', label='kp_nfsp_1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(313)
ax2.set_title('trained agent0 against random agent1(eta 1 in evaluation)')
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_random1_eta1[5][0], 'b', label='kp_nfsp_1_trained_agent0')
line2, = plt.plot(win_rates_against_random1_eta1[5][1], 'r', label='kp_nfsp_1_random_agent1')
line3, = plt.plot(win_rates_against_random1_eta1[5][2], 'g', label='kp_nfsp_1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
plt.show()
plt.figure(figsize=(15, 12))
ax2 = plt.subplot(311)
ax2.set_title('random agent0 against trained agent1(eta 0 in evaluation)')
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_random0_eta1[3][0], 'r', label='kp_nfsp_1_random_agent0')
line2, = plt.plot(win_rates_against_random0_eta1[3][1], 'b', label='kp_nfsp_1_trained_agent1')
line3, = plt.plot(win_rates_against_random0_eta1[3][2], 'g', label='kp_nfsp_1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(312)
ax2.set_title('random agent0 against trained agent1(eta 0.1 in evaluation)')
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_random0_eta1[4][0], 'r', label='kp_nfsp_1_random_agent0')
line2, = plt.plot(win_rates_against_random0_eta1[4][1], 'b', label='kp_nfsp_1_trained_agent1')
line3, = plt.plot(win_rates_against_random0_eta1[4][2], 'g', label='kp_nfsp_1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(313)
ax2.set_title('random agent0 against trained agent1(eta 1 in evaluation)')
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_random0_eta1[5][0], 'r', label='kp_nfsp_1_random_agent0')
line2, = plt.plot(win_rates_against_random0_eta1[5][1], 'b', label='kp_nfsp_1_trained_agent1')
line3, = plt.plot(win_rates_against_random0_eta1[5][2], 'g', label='kp_nfsp_1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
plt.show()
plt.figure(figsize=(15, 20))
# fig.suptitle("win rates against each other")
ax2 = plt.subplot(511)
ax2.set_title("win rates against each other(eta 0 for both in evaluation)")
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_eachother_eta1[5][0], 'b', label='kp_nfsp_1_agent0')
line2, = plt.plot(win_rates_against_eachother_eta1[5][1], 'r', label='kp_nfsp_1_agent1')
line3, = plt.plot(win_rates_against_eachother_eta1[5][2], 'g', label='kp_nfsp_1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(512)
ax2.set_title('win rates against each other(eta 1 for both in evaluation)')
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_eachother_eta1[6][0], 'b', label='kp_nfsp_1_agent0')
line2, = plt.plot(win_rates_against_eachother_eta1[6][1], 'r', label='kp_nfsp_1_agent1')
line3, = plt.plot(win_rates_against_eachother_eta1[6][2], 'g', label='kp_nfsp_1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(513)
ax2.set_title('win rates against each other(eta 0.1 for both in evaluation)')
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_eachother_eta1[7][0], 'b', label='kp_nfsp_1_agent0')
line2, = plt.plot(win_rates_against_eachother_eta1[7][1], 'r', label='kp_nfsp_1_agent1')
line3, = plt.plot(win_rates_against_eachother_eta1[7][2], 'g', label='kp_nfsp_1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(514)
ax2.set_title('win rates against each other(eta 0 for agent0 and eta 1 for agent1 in evaluation)')
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_eachother_eta1[8][0], 'b', label='kp_nfsp_1_agent0')
line2, = plt.plot(win_rates_against_eachother_eta1[8][1], 'r', label='kp_nfsp_1_agent1')
line3, = plt.plot(win_rates_against_eachother_eta1[8][2], 'g', label='kp_nfsp_1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(515)
ax2.set_title('win rates against each other(eta 1 for agent0 and eta 0 for agent1 in evaluation)')
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_eachother_eta1[9][0], 'b', label='kp_nfsp_1_agent0')
line2, = plt.plot(win_rates_against_eachother_eta1[9][1], 'r', label='kp_nfsp_1_agent1')
line3, = plt.plot(win_rates_against_eachother_eta1[9][2], 'g', label='kp_nfsp_1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
plt.show()
# plt.subplot(312)
# line1, = plt.plot(loss_agent0[0][0], "b", label="supervised learning loss")
# line2, = plt.plot(loss_agent0[0][1], "r", label="reinforcement learning loss")
# plt.legend(handles=[line1, line2], loc='upper right')
# plt.ylabel('loss_agent0')
# plt.xlabel('episode(*1e4)')
#
# plt.subplot(313)
# line1, = plt.plot(loss_agent1[0][0], "b", label="supervised learning loss")
# line2, = plt.plot(loss_agent1[0][1], "r", label="reinforcement learning loss")
# plt.legend(handles=[line1, line2], loc='upper right')
# plt.ylabel('loss_agent1')
# plt.xlabel('episode(*1e4)')
# plt.show()
# plot exploitability in ttt_nfsp_0.1_7_26 and ttt_nfsp_1_7_29
plt.figure(figsize=(10,3))
#plt.subplot(311)
plt.ylim(0, 1.05)
x_range_0 = [4*(x+1) for x in range(len(expl_0[1]))]
line1, = plt.plot(x_range_0, expl_0[1], "b", label="ttt_nfsp_0.1")
x_range_1 = [4*(x+1) for x in range(len(expl_1[1]))]
line2, = plt.plot(x_range_1, expl_1[1], "r", label="ttt_nfsp_1")
plt.legend(handles=[line1, line2], loc='lower right')
plt.ylabel('exploitability')
plt.xlabel('episode(*1e4)')
plt.show()
# plt.subplot(312)
# line1, = plt.plot(loss_agent0[0][0], "b", label="supervised learning loss")
# line2, = plt.plot(loss_agent0[0][1], "r", label="reinforcement learning loss")
# plt.legend(handles=[line1, line2], loc='lower right')
# plt.ylabel('loss_agent0')
# plt.xlabel('episode(*1e4)')
#
# plt.subplot(313)
# line1, = plt.plot(loss_agent1[0][0], "b", label="supervised learning loss")
# line2, = plt.plot(loss_agent1[0][1], "r", label="reinforcement learning loss")
# plt.legend(handles=[line1, line2], loc='lower right')
# plt.ylabel('loss_agent1')
# plt.xlabel('episode(*1e4)')
# plt.show()
# plot trained agent0 against random agent1 in ttt_nfsp_0.1_7_26
plt.figure(figsize=(15, 12))
ax2 = plt.subplot(311)
ax2.set_title("trained agent0 against random agent1(eta 0 in evaluation)")
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_random1_eta0[0][0], 'b', label='ttt_nfsp_0.1_trained_agent0')
line2, = plt.plot(win_rates_against_random1_eta0[0][1], 'r', label='ttt_nfsp_0.1_random_agent1')
line3, = plt.plot(win_rates_against_random1_eta0[0][2], 'g', label='ttt_nfsp_0.1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(312)
ax2.set_title('trained agent0 against random agent1(eta 0.1 in evaluation)')
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_random1_eta0[1][0], 'b', label='ttt_nfsp_0.1_trained_agent0')
line2, = plt.plot(win_rates_against_random1_eta0[1][1], 'r', label='ttt_nfsp_0.1_random_agent1')
line3, = plt.plot(win_rates_against_random1_eta0[1][2], 'g', label='ttt_nfsp_0.1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(313)
ax2.set_title('trained agent0 against random agent1(eta 1 in evaluation)')
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_random1_eta0[2][0], 'b', label='ttt_nfsp_0.1_trained_agent0')
line2, = plt.plot(win_rates_against_random1_eta0[2][1], 'r', label='ttt_nfsp_0.1_random_agent1')
line3, = plt.plot(win_rates_against_random1_eta0[2][2], 'g', label='ttt_nfsp_0.1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
plt.show()
# plot random agent0 against trained agent1 in ttt_nfsp_0.1_7_24
plt.figure(figsize=(15, 12))
ax2 = plt.subplot(311)
ax2.set_title('random agent0 against trained agent1(eta 0 in evaluation)')
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_random0_eta0[0][0], 'r', label='ttt_nfsp_0.1_random_agent0')
line2, = plt.plot(win_rates_against_random0_eta0[0][1], 'b', label='ttt_nfsp_0.1_trained_agent1')
line3, = plt.plot(win_rates_against_random0_eta0[0][2], 'g', label='ttt_nfsp_0.1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(312)
ax2.set_title('random agent0 against trained agent1(eta 0.1 in evaluation)')
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_random0_eta0[1][0], 'r', label='ttt_nfsp_0.1_random_agent0')
line2, = plt.plot(win_rates_against_random0_eta0[1][1], 'b', label='ttt_nfsp_0.1_trained_agent1')
line3, = plt.plot(win_rates_against_random0_eta0[1][2], 'g', label='ttt_nfsp_0.1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(313)
ax2.set_title('random agent0 against trained agent1(eta 1 in evaluation)')
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_random0_eta0[2][0], 'r', label='ttt_nfsp_0.1_random_agent0')
line2, = plt.plot(win_rates_against_random0_eta0[2][1], 'b', label='ttt_nfsp_0.1_trained_agent1')
line3, = plt.plot(win_rates_against_random0_eta0[2][2], 'g', label='ttt_nfsp_0.1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
plt.show()
# plot win rates of trained agents against each other in ttt_nfsp_0.1_7_24
plt.figure(figsize=(15, 20))
# fig.suptitle("win rates against each other")
ax2 = plt.subplot(511)
ax2.set_title("win rates against each other(eta 0 for both in evaluation)")
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_eachother_eta0[0][0], 'b', label='ttt_nfsp_0.1_agent0')
line2, = plt.plot(win_rates_against_eachother_eta0[0][1], 'r', label='ttt_nfsp_0.1_agent1')
line3, = plt.plot(win_rates_against_eachother_eta0[0][2], 'g', label='ttt_nfsp_0.1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(512)
ax2.set_title('win rates against each other(eta 1 for both in evaluation)')
plt.ylim(-0.05, 1.05)
line1, = plt.plot(win_rates_against_eachother_eta0[1][0], 'b', label='ttt_nfsp_0.1_agent0')
line2, = plt.plot(win_rates_against_eachother_eta0[1][1], 'r', label='ttt_nfsp_0.1_agent1')
line3, = plt.plot(win_rates_against_eachother_eta0[1][2], 'g', label='ttt_nfsp_0.1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(513)
ax2.set_title('win rates against each other(eta 0.1 for both in evaluation)')
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_eachother_eta0[2][0], 'b', label='ttt_nfsp_0.1_agent0')
line2, = plt.plot(win_rates_against_eachother_eta0[2][1], 'r', label='ttt_nfsp_0.1_agent1')
line3, = plt.plot(win_rates_against_eachother_eta0[2][2], 'g', label='ttt_nfsp_0.1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(514)
ax2.set_title('win rates against each other(eta 0 for agent0 and eta 1 for agent1 in evaluation)')
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_eachother_eta0[3][0], 'b', label='ttt_nfsp_0.1_agent0')
line2, = plt.plot(win_rates_against_eachother_eta0[3][1], 'r', label='ttt_nfsp_0.1_agent1')
line3, = plt.plot(win_rates_against_eachother_eta0[3][2], 'g', label='ttt_nfsp_0.1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(515)
ax2.set_title('win rates against each other(eta 1 for agent0 and eta 0 for agent1 in evaluation)')
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_eachother_eta0[4][0], 'b', label='ttt_nfsp_0.1_agent0')
line2, = plt.plot(win_rates_against_eachother_eta0[4][1], 'r', label='ttt_nfsp_0.1_agent1')
line3, = plt.plot(win_rates_against_eachother_eta0[4][2], 'g', label='ttt_nfsp_0.1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
plt.show()
# plot win rates of ttt_nfsp_1_7_29 (eta 1 in training): trained agent0 against random agent1
plt.figure(figsize=(15, 12))
ax2 = plt.subplot(311)
ax2.set_title("trained agent0 against random agent1(eta 0 in evaluation)")
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_random1_eta1[0][0], 'b', label='ttt_nfsp_1_trained_agent0')
line2, = plt.plot(win_rates_against_random1_eta1[0][1], 'r', label='ttt_nfsp_1_random_agent1')
line3, = plt.plot(win_rates_against_random1_eta1[0][2], 'g', label='ttt_nfsp_1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(312)
ax2.set_title('trained agent0 against random agent1(eta 0.1 in evaluation)')
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_random1_eta1[1][0], 'b', label='ttt_nfsp_1_trained_agent0')
line2, = plt.plot(win_rates_against_random1_eta1[1][1], 'r', label='ttt_nfsp_1_random_agent1')
line3, = plt.plot(win_rates_against_random1_eta1[1][2], 'g', label='ttt_nfsp_1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(313)
ax2.set_title('trained agent0 against random agent1(eta 1 in evaluation)')
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_random1_eta1[2][0], 'b', label='ttt_nfsp_1_trained_agent0')
line2, = plt.plot(win_rates_against_random1_eta1[2][1], 'r', label='ttt_nfsp_1_random_agent1')
line3, = plt.plot(win_rates_against_random1_eta1[2][2], 'g', label='ttt_nfsp_1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
plt.show()
# plot random agent0 against trained agent1 in ttt_nfsp_1_7_29
plt.figure(figsize=(15, 12))
ax2 = plt.subplot(311)
ax2.set_title('random agent0 against trained agent1(eta 0 in evaluation)')
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_random0_eta1[0][0], 'r', label='ttt_nfsp_1_random_agent0')
line2, = plt.plot(win_rates_against_random0_eta1[0][1], 'b', label='ttt_nfsp_1_trained_agent1')
line3, = plt.plot(win_rates_against_random0_eta1[0][2], 'g', label='ttt_nfsp_1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(312)
ax2.set_title('random agent0 against trained agent1(eta 0.1 in evaluation)')
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_random0_eta1[1][0], 'r', label='ttt_nfsp_1_random_agent0')
line2, = plt.plot(win_rates_against_random0_eta1[1][1], 'b', label='ttt_nfsp_1_trained_agent1')
line3, = plt.plot(win_rates_against_random0_eta1[1][2], 'g', label='ttt_nfsp_1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(313)
ax2.set_title('random agent0 against trained agent1(eta 1 in evaluation)')
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_random0_eta1[2][0], 'r', label='ttt_nfsp_1_random_agent0')
line2, = plt.plot(win_rates_against_random0_eta1[2][1], 'b', label='ttt_nfsp_1_trained_agent1')
line3, = plt.plot(win_rates_against_random0_eta1[2][2], 'g', label='ttt_nfsp_1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
plt.show()
# plot win rates of trained agents against each other in ttt_nfsp_1_7_29
plt.figure(figsize=(15, 20))
# fig.suptitle("win rates against each other")
ax2 = plt.subplot(511)
ax2.set_title("win rates against each other(eta 0 for both in evaluation)")
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_eachother_eta1[0][0], 'b', label='ttt_nfsp_1_agent0')
line2, = plt.plot(win_rates_against_eachother_eta1[0][1], 'r', label='ttt_nfsp_1_agent1')
line3, = plt.plot(win_rates_against_eachother_eta1[0][2], 'g', label='ttt_nfsp_1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(512)
ax2.set_title('win rates against each other(eta 1 for both in evaluation)')
plt.ylim(-0.05, 1.05)
line1, = plt.plot(win_rates_against_eachother_eta1[1][0], 'b', label='ttt_nfsp_1_agent0')
line2, = plt.plot(win_rates_against_eachother_eta1[1][1], 'r', label='ttt_nfsp_1_agent1')
line3, = plt.plot(win_rates_against_eachother_eta1[1][2], 'g', label='ttt_nfsp_1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(513)
ax2.set_title('win rates against each other(eta 0.1 for both in evaluation)')
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_eachother_eta1[2][0], 'b', label='ttt_nfsp_1_agent0')
line2, = plt.plot(win_rates_against_eachother_eta1[2][1], 'r', label='ttt_nfsp_1_agent1')
line3, = plt.plot(win_rates_against_eachother_eta1[2][2], 'g', label='ttt_nfsp_1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(514)
ax2.set_title('win rates against each other(eta 0 for agent0 and eta 1 for agent1 in evaluation)')
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_eachother_eta1[3][0], 'b', label='ttt_nfsp_1_agent0')
line2, = plt.plot(win_rates_against_eachother_eta1[3][1], 'r', label='ttt_nfsp_1_agent1')
line3, = plt.plot(win_rates_against_eachother_eta1[3][2], 'g', label='ttt_nfsp_1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
ax2 = plt.subplot(515)
ax2.set_title('win rates against each other(eta 1 for agent0 and eta 0 for agent1 in evaluation)')
plt.ylim(0, 1)
line1, = plt.plot(win_rates_against_eachother_eta1[4][0], 'b', label='ttt_nfsp_1_agent0')
line2, = plt.plot(win_rates_against_eachother_eta1[4][1], 'r', label='ttt_nfsp_1_agent1')
line3, = plt.plot(win_rates_against_eachother_eta1[4][2], 'g', label='ttt_nfsp_1_draw')
plt.legend(handles=[line1, line2, line3], loc='lower right', prop={'size': 9})
plt.ylabel('win_rate')
plt.xlabel('episode(*1e4)')
plt.show()
'''
if __name__ == "__main__":
app.run(main)<file_sep># Copyright 2019 DeepMind Technologies Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""NFSP agents trained on Kuhn Poker."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import app
from absl import flags
from absl import logging
import tensorflow.compat.v1 as tf
from open_spiel.python import policy
from open_spiel.python import rl_environment
from open_spiel.python.algorithms import exploitability
from open_spiel.python.algorithms import nfsp
import glob
import os
import numpy as np
from open_spiel.python.algorithms import random_agent
FLAGS = flags.FLAGS
flags.DEFINE_integer("num_train_episodes", int(3e6),
"Number of training episodes.")
flags.DEFINE_integer("eval_every", 100,
"Episode frequency at which the agents are evaluated.")
flags.DEFINE_list("hidden_layers_sizes", [
128,
], "Number of hidden units in the avg-net and Q-net.")
flags.DEFINE_integer("replay_buffer_capacity", int(2e5),
"Size of the replay buffer.")
flags.DEFINE_integer("reservoir_buffer_capacity", int(2e6),
"Size of the reservoir buffer.")
flags.DEFINE_float("anticipatory_param_agent0", 0,
"Prob of using the rl best response as episode policy for agent0.")
flags.DEFINE_float("anticipatory_param_agent1", 0,
"Prob of using the rl best response as episode policy for agent1.")
flags.DEFINE_string("experiment_name", "kuhn_poker_0.1_7_27", "Experiment name")
flags.DEFINE_string("load_path", "/home/jxu8/Code_update/open_spiel/sessions_nfsp/", "Path to load the session")
flags.DEFINE_string("save_path", "/home/jxu8/Code_update/open_spiel/evaluation_data/eval_kp_nfsp_0.1_7_27/", "Path to load the session")
class NFSPPolicies(policy.Policy):
"""Joint policy to be evaluated."""
def __init__(self, env, nfsp_policies, mode):
game = env.game
player_ids = [0, 1]
super(NFSPPolicies, self).__init__(game, player_ids)
self._policies = nfsp_policies
self._mode = mode
self._obs = {"info_state": [None, None], "legal_actions": [None, None]}
def action_probabilities(self, state, player_id=None):
cur_player = state.current_player()
legal_actions = state.legal_actions(cur_player)
self._obs["current_player"] = cur_player
self._obs["info_state"][cur_player] = (
state.information_state_tensor(cur_player))
self._obs["legal_actions"][cur_player] = legal_actions
info_state = rl_environment.TimeStep(
observations=self._obs, rewards=None, discounts=None, step_type=None)
with self._policies[cur_player].temp_mode_as(self._mode):
p = self._policies[cur_player].step(info_state, is_evaluation=True).probs
prob_dict = {action: p[action] for action in legal_actions}
return prob_dict
def eval_against_random_agent1(env, trained_agents, random_agents, num_episodes):
"""Evaluates `trained_agents` against `random_agents` for `num_episodes`."""
wins = np.zeros(3)
rewards = np.zeros(2)
cur_agents = [trained_agents[0], random_agents[1]]
for _ in range(num_episodes):
time_step = env.reset()
while not time_step.last():
player_id = time_step.observations["current_player"]
agent_output = cur_agents[player_id].step(time_step, is_evaluation=True)
time_step = env.step([agent_output.action])
rewards[0] += time_step.rewards[0]
rewards[1] += time_step.rewards[1]
if time_step.rewards[0] > 0:
wins[0] += 1
elif time_step.rewards[1] > 0:
wins[1] += 1
else:
wins[2] += 1
return wins / num_episodes, rewards / num_episodes
def eval_against_random_agent0(env, trained_agents, random_agents, num_episodes):
"""Evaluates `trained_agents` against `random_agents` for `num_episodes`."""
wins = np.zeros(3)
rewards = np.zeros(2)
cur_agents = [random_agents[0], trained_agents[1]]
for _ in range(num_episodes):
time_step = env.reset()
while not time_step.last():
player_id = time_step.observations["current_player"]
agent_output = cur_agents[player_id].step(time_step, is_evaluation=True)
time_step = env.step([agent_output.action])
rewards[0] += time_step.rewards[0]
rewards[1] += time_step.rewards[1]
if time_step.rewards[0] > 0:
wins[0] += 1
elif time_step.rewards[1] > 0:
wins[1] += 1
else:
wins[2] += 1
return wins / num_episodes, rewards / num_episodes
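# Plays the two trained agents against each other for `num_episodes` and returns the
# win/draw rates and average rewards; both agents act in evaluation mode.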
def eval_against_trained_agents(env, trained_agents, num_episodes):
wins = np.zeros(3)
rewards = np.zeros(2)
for _ in range(num_episodes):
time_step = env.reset()
while not time_step.last():
player_id = time_step.observations["current_player"]
if env.is_turn_based:
agents_output = trained_agents[player_id].step(time_step, is_evaluation=True)
action_list = [agents_output.action]
else:
agents_output = [agent.step(time_step, is_evaluation=True) for agent in trained_agents]
action_list = [agent_output.action for agent_output in agents_output]
time_step = env.step(action_list)
rewards[0] += time_step.rewards[0]
rewards[1] += time_step.rewards[1]
if time_step.rewards[0] > 0:
wins[0] += 1
elif time_step.rewards[1] > 0:
wins[1] += 1
else:
wins[2] += 1
return wins / num_episodes, rewards / num_episodes
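# Baseline: plays the two untrained random agents against each other and returns the
# same win/draw and average-reward statistics.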
def eval_between_random_agents(env, random_agents, num_episodes):
wins = np.zeros(3)
rewards = np.zeros(2)
for _ in range(num_episodes):
time_step = env.reset()
while not time_step.last():
player_id = time_step.observations["current_player"]
if env.is_turn_based:
agents_output = random_agents[player_id].step(time_step, is_evaluation=True)
action_list = [agents_output.action]
else:
agents_output = [agent.step(time_step, is_evaluation=True) for agent in random_agents]
action_list = [agent_output.action for agent_output in agents_output]
time_step = env.step(action_list)
rewards[0] += time_step.rewards[0]
rewards[1] += time_step.rewards[1]
if time_step.rewards[0] > 0:
wins[0] += 1
elif time_step.rewards[1] > 0:
wins[1] += 1
else:
wins[2] += 1
return wins / num_episodes, rewards / num_episodes
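# Empirically measures the trained agents' behaviour for one fixed card deal.
# `outcome0` and `outcome1` are the chance outcomes dealt to player 0 and player 1;
# env.reset_jx appears to be a custom environment hook that fixes this deal. The function
# replays `num_episodes` evaluation episodes, records the action/probability traces, and
# returns per-decision-point action frequencies for both players.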
def action_probability(env, trained_agents, num_episodes, outcome0, outcome1):
#state_list = []
acti_list = []
prob_list = []
rewards_list = []
tmp_act = []
tmp_probs = []
agent0_1 = np.zeros(2)
agent1_1 = np.zeros(4)
agent0_2 = np.zeros(2)
agent0_1_probs = np.zeros(2)
agent1_1_probs = np.zeros(4)
agent0_2_probs = np.zeros(2)
for _ in range(num_episodes):
time_step = env.reset_jx(outcome0, outcome1)
#state_list.append(outcome_list)
while not time_step.last():
player_id = time_step.observations["current_player"]
if env.is_turn_based:
agents_output = trained_agents[player_id].step(
time_step, is_evaluation=True)
action_list = [agents_output.action]
else:
agents_output = [
agent.step(time_step, is_evaluation=True) for agent in trained_agents
]
action_list = [agent_output.action for agent_output in agents_output]
tmp_act.append(agents_output.action)
tmp_probs.append(agents_output.probs)
time_step = env.step(action_list)
rewards = time_step.rewards
rewards_list.append(rewards)
acti_list.append(tmp_act)
prob_list.append(tmp_probs)
tmp_act = []
tmp_probs = []
for action in acti_list:
if action[0] == 0:
agent0_1[0] += 1
if action[1] == 0:
agent1_1[0] += 1
else:
if action[2] == 0:
agent0_2[0] += 1
else:
if action[1] == 0:
agent1_1[2] += 1
agent0_1[1] = num_episodes - agent0_1[0]
agent1_1[3] = agent0_1[1] - agent1_1[2]
agent1_1[1] = agent0_1[0] - agent1_1[0]
agent0_2[1] = agent1_1[1] - agent0_2[0]
agent0_1_probs[0] = agent0_1[0] / num_episodes
agent0_1_probs[1] = agent0_1[1] / num_episodes
agent1_1_probs[agent1_1 != 0] = agent1_1[agent1_1 != 0] / np.array([agent0_1[0], agent0_1[0], agent0_1[1], agent0_1[1]])[agent1_1 != 0]
agent0_2_probs[agent0_2 != 0] = agent0_2[agent0_2 != 0] / agent1_1[1]
return acti_list, prob_list, rewards_list, agent0_1_probs, agent1_1_probs[:2], agent1_1_probs[2:], agent0_2_probs
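# Queries the action probabilities each agent reports at the four decision points of a
# fixed deal: player 0's opening move, player 1's reply to action 0 and to action 1, and
# player 0's second move after the 0-then-1 sequence (in Kuhn Poker, action 0 is pass/check
# and action 1 is bet).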
def policy_porbs(env, trained_agents, outcome0, outcome1):
time_step = env.reset_jx(outcome0, outcome1)
# print(env.get_state)
agents_output_0 = trained_agents[0].step(
time_step, is_evaluation=True)
time_step = env.reset_jx(outcome0, outcome1)
time_step = env.step([0])
agents_output_1 = trained_agents[1].step(
time_step, is_evaluation=True)
time_step = env.reset_jx(outcome0, outcome1)
time_step = env.step([1])
agents_output_2 = trained_agents[1].step(
time_step, is_evaluation=True)
time_step = env.reset_jx(outcome0, outcome1)
time_step = env.step([0])
time_step = env.step([1])
agents_output_3 = trained_agents[0].step(
time_step, is_evaluation=True)
return agents_output_0.probs, agents_output_1.probs, agents_output_2.probs, agents_output_3.probs
def main(unused_argv):
game = "kuhn_poker"
num_players = 2
load_path = FLAGS.load_path + FLAGS.experiment_name
env_configs = {"players": num_players}
env = rl_environment.Environment(game, **env_configs)
info_state_size = env.observation_spec()["info_state"][0]
num_actions = env.action_spec()["num_actions"]
hidden_layers_sizes = [int(l) for l in FLAGS.hidden_layers_sizes]
kwargs = {
"replay_buffer_capacity": FLAGS.replay_buffer_capacity,
"epsilon_decay_duration": FLAGS.num_train_episodes,
"epsilon_start": 0.06,
"epsilon_end": 0.001,
}
random_agents = [
random_agent.RandomAgent(player_id=idx, num_actions=num_actions)
for idx in range(num_players)
]
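  # The anticipatory parameters passed on the command line control how the restored agents
  # mix their best-response and average policies during evaluation, and pick the matching
  # output subdirectory (eta_0, eta_0.1, eta_1, eta_0_1 or eta_1_0) below.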
if FLAGS.anticipatory_param_agent0 == 0 and FLAGS.anticipatory_param_agent1 == 0:
eta_name = 'eta_0'
elif FLAGS.anticipatory_param_agent0 == 0.1 and FLAGS.anticipatory_param_agent1 == 0.1:
eta_name = 'eta_0.1'
elif FLAGS.anticipatory_param_agent0 == 1 and FLAGS.anticipatory_param_agent1 == 1:
eta_name = 'eta_1'
elif FLAGS.anticipatory_param_agent0 == 0 and FLAGS.anticipatory_param_agent1 == 1:
eta_name = 'eta_0_1'
elif FLAGS.anticipatory_param_agent0 == 1 and FLAGS.anticipatory_param_agent1 == 0:
eta_name = 'eta_1_0'
#model_dirs = sorted(glob.glob(load_path + "/episode-*"), key=os.path.getmtime)
model_dirs = sorted(glob.glob(load_path + "/episode-*"), key=lambda x: int(os.path.split(x)[1][8:]))
#model_dirs = glob.glob(load_path + '/episodes-*')
print(model_dirs[0:4])
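  # Evaluate every saved checkpoint in episode order (one directory per 10,000 training
  # episodes); `counter % 1 == 0` is always true, so nothing is skipped. Raise the modulus
  # to subsample checkpoints.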
for counter, dir in enumerate(model_dirs, 1):
if counter % 1 == 0:
tf.reset_default_graph()
with tf.Session() as sess:
# pylint: disable=g-complex-comprehension
agent0 = nfsp.NFSP(sess, 0, info_state_size, num_actions, hidden_layers_sizes,
FLAGS.reservoir_buffer_capacity, FLAGS.anticipatory_param_agent0,
**kwargs)
agent1 = nfsp.NFSP(sess, 1, info_state_size, num_actions, hidden_layers_sizes,
FLAGS.reservoir_buffer_capacity, FLAGS.anticipatory_param_agent1,
**kwargs)
saver = tf.train.Saver()
saver.restore(sess, dir + "/trained_model-10000")
expl_policies_avg = NFSPPolicies(env, [agent0, agent1], nfsp.MODE.average_policy)
# expl_list, expl = exploitability.exploitability(env.game, expl_policies_avg)
# f4 = open("/home/jxu8/Code_update/open_spiel/evaluation_data/eval_ttt_nfsp_0.1_7_24/eval_eta_0/win_rates_against_random_agent1.txt", "a")
# f4.write(str(expl) + '\n')
#
# logging.info("Episode: %s, Exploitability AVG %s", counter*10000, expl)
# logging.info("_____________________________________________")
win_rates_against_random_agent1, avg_utility_against_random_agent1 = eval_against_random_agent1(env, [agent0, agent1], random_agents, 1000)
win_rates_against_random_agent0, avg_utility_against_random_agent0 = eval_against_random_agent0(env, [agent0, agent1], random_agents, 1000)
win_rates_against_trained_agents, avg_utility = eval_against_trained_agents(env, [agent0, agent1], 1000)
behavior_probabilities_0_1 = action_probability(env, [agent0, agent1], 100, 0, 1)
behavior_probabilities_0_2 = action_probability(env, [agent0, agent1], 100, 0, 2)
behavior_probabilities_1_0 = action_probability(env, [agent0, agent1], 100, 1, 0)
behavior_probabilities_1_2 = action_probability(env, [agent0, agent1], 100, 1, 2)
behavior_probabilities_2_0 = action_probability(env, [agent0, agent1], 100, 2, 0)
behavior_probabilities_2_1 = action_probability(env, [agent0, agent1], 100, 2, 1)
policy_probabilities_0_1 = policy_porbs(env, [agent0, agent1], 0, 1)
policy_probabilities_0_2 = policy_porbs(env, [agent0, agent1], 0, 2)
policy_probabilities_1_0 = policy_porbs(env, [agent0, agent1], 1, 0)
policy_probabilities_1_2 = policy_porbs(env, [agent0, agent1], 1, 2)
policy_probabilities_2_0 = policy_porbs(env, [agent0, agent1], 2, 0)
policy_probabilities_2_1 = policy_porbs(env, [agent0, agent1], 2, 1)
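        # The six evaluated deals are written to files named after the players' cards:
        # chance outcomes 0/1/2 correspond to J/Q/K, so JQ.txt holds player 0 with the Jack
        # against player 1 with the Queen.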
f1 = open(FLAGS.save_path + "avg_utility/" + eta_name + "/avg_utility_against_random_agent1.txt", "a")
f1.write(str(avg_utility_against_random_agent1[0]) + ' ' + str(avg_utility_against_random_agent1[1]) + '\n')
f2 = open(FLAGS.save_path + "avg_utility/" + eta_name + "/avg_utility_against_random_agent0.txt", "a")
f2.write(str(avg_utility_against_random_agent0[0]) + ' ' + str(avg_utility_against_random_agent0[1]) + '\n')
f3 = open(FLAGS.save_path + "avg_utility/" + eta_name + "/avg_utility_against_eachother.txt", "a")
f3.write(str(avg_utility[0]) + ' ' + str(avg_utility[1]) + '\n')
f4 = open(FLAGS.save_path + "behavior_probs/" + eta_name + "/competition_based/" + "JQ.txt", "a")
f4.write(str(behavior_probabilities_0_1[3][0]) + ' ' + str(behavior_probabilities_0_1[3][1]) + ' ' +
str(behavior_probabilities_0_1[4][0]) + ' ' + str(behavior_probabilities_0_1[4][1]) + ' ' +
str(behavior_probabilities_0_1[5][0]) + ' ' + str(behavior_probabilities_0_1[5][1]) + ' ' +
str(behavior_probabilities_0_1[6][0]) + ' ' + str(behavior_probabilities_0_1[6][1]) + '\n')
f5 = open(FLAGS.save_path + "behavior_probs/" + eta_name + "/competition_based/" + "JK.txt", "a")
f5.write(str(behavior_probabilities_0_2[3][0]) + ' ' + str(behavior_probabilities_0_2[3][1]) + ' ' +
str(behavior_probabilities_0_2[4][0]) + ' ' + str(behavior_probabilities_0_2[4][1]) + ' ' +
str(behavior_probabilities_0_2[5][0]) + ' ' + str(behavior_probabilities_0_2[5][1]) + ' ' +
str(behavior_probabilities_0_2[6][0]) + ' ' + str(behavior_probabilities_0_2[6][1]) + '\n')
f6 = open(FLAGS.save_path + "behavior_probs/" + eta_name + "/competition_based/" + "QJ.txt", "a")
f6.write(str(behavior_probabilities_1_0[3][0]) + ' ' + str(behavior_probabilities_1_0[3][1]) + ' ' +
str(behavior_probabilities_1_0[4][0]) + ' ' + str(behavior_probabilities_1_0[4][1]) + ' ' +
str(behavior_probabilities_1_0[5][0]) + ' ' + str(behavior_probabilities_1_0[5][1]) + ' ' +
str(behavior_probabilities_1_0[6][0]) + ' ' + str(behavior_probabilities_1_0[6][1]) + '\n')
f7 = open(FLAGS.save_path + "behavior_probs/" + eta_name + "/competition_based/" + "QK.txt", "a")
f7.write(str(behavior_probabilities_1_2[3][0]) + ' ' + str(behavior_probabilities_1_2[3][1]) + ' ' +
str(behavior_probabilities_1_2[4][0]) + ' ' + str(behavior_probabilities_1_2[4][1]) + ' ' +
str(behavior_probabilities_1_2[5][0]) + ' ' + str(behavior_probabilities_1_2[5][1]) + ' ' +
str(behavior_probabilities_1_2[6][0]) + ' ' + str(behavior_probabilities_1_2[6][1]) + '\n')
f8 = open(FLAGS.save_path + "behavior_probs/" + eta_name + "/competition_based/" + "KJ.txt", "a")
f8.write(str(behavior_probabilities_2_0[3][0]) + ' ' + str(behavior_probabilities_2_0[3][1]) + ' ' +
str(behavior_probabilities_2_0[4][0]) + ' ' + str(behavior_probabilities_2_0[4][1]) + ' ' +
str(behavior_probabilities_2_0[5][0]) + ' ' + str(behavior_probabilities_2_0[5][1]) + ' ' +
str(behavior_probabilities_2_0[6][0]) + ' ' + str(behavior_probabilities_2_0[6][1]) + '\n')
f9 = open(FLAGS.save_path + "behavior_probs/" + eta_name + "/competition_based/" + "KQ.txt", "a")
f9.write(str(behavior_probabilities_2_1[3][0]) + ' ' + str(behavior_probabilities_2_1[3][1]) + ' ' +
str(behavior_probabilities_2_1[4][0]) + ' ' + str(behavior_probabilities_2_1[4][1]) + ' ' +
str(behavior_probabilities_2_1[5][0]) + ' ' + str(behavior_probabilities_2_1[5][1]) + ' ' +
str(behavior_probabilities_2_1[6][0]) + ' ' + str(behavior_probabilities_2_1[6][1]) + '\n')
f10 = open(FLAGS.save_path + "behavior_probs/" + eta_name + "/policy_based/" + "JQ.txt", "a")
f10.write(str(policy_probabilities_0_1[0][0]) + ' ' + str(policy_probabilities_0_1[0][1]) + ' ' +
str(policy_probabilities_0_1[1][0]) + ' ' + str(policy_probabilities_0_1[1][1]) + ' ' +
str(policy_probabilities_0_1[2][0]) + ' ' + str(policy_probabilities_0_1[2][1]) + ' ' +
str(policy_probabilities_0_1[3][0]) + ' ' + str(policy_probabilities_0_1[3][1]) + '\n')
f11 = open(FLAGS.save_path + "behavior_probs/" + eta_name + "/policy_based/" + "JK.txt", "a")
f11.write(str(policy_probabilities_0_2[0][0]) + ' ' + str(policy_probabilities_0_2[0][1]) + ' ' +
str(policy_probabilities_0_2[1][0]) + ' ' + str(policy_probabilities_0_2[1][1]) + ' ' +
str(policy_probabilities_0_2[2][0]) + ' ' + str(policy_probabilities_0_2[2][1]) + ' ' +
str(policy_probabilities_0_2[3][0]) + ' ' + str(policy_probabilities_0_2[3][1]) + '\n')
f12 = open(FLAGS.save_path + "behavior_probs/" + eta_name + "/policy_based/" + "QJ.txt", "a")
f12.write(str(policy_probabilities_1_0[0][0]) + ' ' + str(policy_probabilities_1_0[0][1]) + ' ' +
str(policy_probabilities_1_0[1][0]) + ' ' + str(policy_probabilities_1_0[1][1]) + ' ' +
str(policy_probabilities_1_0[2][0]) + ' ' + str(policy_probabilities_1_0[2][1]) + ' ' +
str(policy_probabilities_1_0[3][0]) + ' ' + str(policy_probabilities_1_0[3][1]) + '\n')
f13 = open(FLAGS.save_path + "behavior_probs/" + eta_name + "/policy_based/" + "QK.txt", "a")
f13.write(str(policy_probabilities_1_2[0][0]) + ' ' + str(policy_probabilities_1_2[0][1]) + ' ' +
str(policy_probabilities_1_2[1][0]) + ' ' + str(policy_probabilities_1_2[1][1]) + ' ' +
str(policy_probabilities_1_2[2][0]) + ' ' + str(policy_probabilities_1_2[2][1]) + ' ' +
str(policy_probabilities_1_2[3][0]) + ' ' + str(policy_probabilities_1_2[3][1]) + '\n')
f14 = open(FLAGS.save_path + "behavior_probs/" + eta_name + "/policy_based/" + "KJ.txt", "a")
f14.write(str(policy_probabilities_2_0[0][0]) + ' ' + str(policy_probabilities_2_0[0][1]) + ' ' +
str(policy_probabilities_2_0[1][0]) + ' ' + str(policy_probabilities_2_0[1][1]) + ' ' +
str(policy_probabilities_2_0[2][0]) + ' ' + str(policy_probabilities_2_0[2][1]) + ' ' +
str(policy_probabilities_2_0[3][0]) + ' ' + str(policy_probabilities_2_0[3][1]) + '\n')
f15 = open(FLAGS.save_path + "behavior_probs/" + eta_name + "/policy_based/" + "KQ.txt", "a")
f15.write(str(policy_probabilities_2_1[0][0]) + ' ' + str(policy_probabilities_2_1[0][1]) + ' ' +
str(policy_probabilities_2_1[1][0]) + ' ' + str(policy_probabilities_2_1[1][1]) + ' ' +
str(policy_probabilities_2_1[2][0]) + ' ' + str(policy_probabilities_2_1[2][1]) + ' ' +
str(policy_probabilities_2_1[3][0]) + ' ' + str(policy_probabilities_2_1[3][1]) + '\n')
logging.info("Episode: %s", counter*10000)
logging.info("Trained_agent0 vs Random_agent1: %s", avg_utility_against_random_agent1)
logging.info("Random_agent0 vs Trained_agent1: %s", avg_utility_against_random_agent0)
logging.info("Trained_agent0 vs Trained_agent1 %s", avg_utility)
logging.info("_____________________________________________")
if __name__ == "__main__":
app.run(main)
| 2535ae0f25f391ff73f34eba4651b2ddcfcd2202 | [
"Python"
] | 9 | Python | xujing1994/open_spiel | 7663a2717f16ff84c0d6a6bfdf19a9c21b37b765 | cb6ebec107fb5a10d3ffd769f96e0da82c3dc561 | |
refs/heads/master | <file_sep>import gql from "graphql-tag";
const GET_ORGANIZATION_INFO = gql`
query organization($login: String!)
{
organization(login: $login) {
id
name
location
login
avatarUrl
url,
description
}
}
`;
export default GET_ORGANIZATION_INFO;
<file_sep>## Github Client
The project idea is to develop a new design for GitHub.
### The following features were implemented:
- Ability to see logged user info<br />
- Ability to search by users/repositories<br />
- Ability to see the found users/repositories<br />
- Ability to see short user information (in preview)<br />
- Ability to see short repository information (in preview)<br />
- Ability to see extended user information (in preview)<br />
- Ability to see extended repository information (in preview)<br />
- Ability to add/remove a star from repositories<br />
- Ability to follow/unfollow users<br />
## Deployed version is located here: [click](https://secret-savannah-93127.herokuapp.com/)
Or you can clone the repository and:
Install the dependencies:
### `npm install`
Run the app in development mode:<br />
### `npm start`
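
For reference, here is a minimal sketch of how the exported `GET_ORGANIZATION_INFO` query could be consumed in a component. It assumes the app wires Apollo up with the `useQuery` hook from `@apollo/react-hooks`; the real components and file paths in this repository may differ.

```jsx
import React from 'react';
import { useQuery } from '@apollo/react-hooks';
// Path is an assumption; point it at wherever GET_ORGANIZATION_INFO is exported from.
import GET_ORGANIZATION_INFO from './queries/organization';

const OrganizationCard = ({ login }) => {
  // Pass the organization login as the $login variable declared in the query.
  const { loading, error, data } = useQuery(GET_ORGANIZATION_INFO, {
    variables: { login },
  });

  if (loading) return <p>Loading...</p>;
  if (error) return <p>Something went wrong.</p>;

  const { name, description, url, avatarUrl } = data.organization;
  return (
    <a href={url}>
      <img src={avatarUrl} alt={name} width="32" />
      {name}: {description}
    </a>
  );
};

export default OrganizationCard;
```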
| 3e0f7e43160bc37d03922bebbafa859ff0487ca6 | [
"JavaScript",
"Markdown"
] | 2 | JavaScript | NiyazUrazaev/GithubClient | 9f34b16a264c0413d3cecda32175056bc1eb3ff1 | e2ebc6b16d6a946c13c5d1adb1fe9f1ba8276c28 | |
refs/heads/master | <repo_name>vthiery/vscode-prettify-selected-json<file_sep>/src/JsonPrettifier.ts
'use strict';
// The module 'vscode' contains the VS Code extensibility API
// Import the module and reference it with the alias vscode in your code below
import * as vscode from 'vscode';
// Prettify function
function prettify(textEditor, start, end) {
    var range = new vscode.Range(start, end);
// Return if empty
if (range.isEmpty) {
return;
}
// Get text from range
var ugly = textEditor.document.getText(range);
// Prettify the selection
let json = null;
try {
json = JSON.parse(ugly);
} catch (e) {
vscode.window.showInformationMessage(e.message);
return;
}
// Use the tabSize defined in user settings
const indentSpacing = vscode.window.activeTextEditor.options.tabSize;
// Stringify the result
let pretty = JSON.stringify(json, null, indentSpacing);
textEditor.edit(builder => {
builder.replace(range, pretty);
});
}
// Prettify the active selection following the input method
function prettifyActive() {
var textEditor = vscode.window.activeTextEditor;
var selection = textEditor.selection;
// Apply sort on our selection with a given method
prettify(textEditor, selection.start, selection.end);
}
// Prettify
exports.prettify = prettifyActive;
<file_sep>/vsc-extension-quickstart.md
# Welcome to VS Code Prettify Selected JSON Extension
## What's in the folder
* This folder contains all of the files necessary for developing and packaging the extension
* `package.json` - describe the extension, compilation scripts and lists the dependencies,
* `src/extension.ts` - expose the commands to be called in VS Code,
* `src/JsonPrettifier.ts` - implements the actual JSON prettifier.
## Get up and running straight away
* press `F5` to open a new window with your extension loaded
* run your command from the command palette by pressing (`Ctrl+Shift+P` or `Cmd+Shift+P` on Mac) and typing `Prettify selected JSON`
* set breakpoints in your code inside the sources to debug the extension
## Make changes
* changes can be made via pull requests but should always stick to the original purpose of the extension.
<file_sep>/src/extension.ts
'use strict';
// The module 'vscode' contains the VS Code extensibility API
// Import the module and reference it with the alias vscode in your code below
import * as vscode from 'vscode';
var prettifier = require('./JsonPrettifier');
// this method is called when your extension is activated
// your extension is activated the very first time the command is executed
export function activate(context: vscode.ExtensionContext) {
var commands = [
vscode.commands.registerCommand('prettifyJSON', prettifier.prettify)
];
commands.forEach(
function (command) {
context.subscriptions.push(command);
}
);
}
// this method is called when your extension is deactivated
export function deactivate() {
}<file_sep>/README.md
# Visual Studio Code Prettify Selected JSON Extension
The purpose of this extension is to offer a JSON prettifier that will act on a given selection. Hence, one can only prettify
locally and keep one-line JSON and pretty JSON in the same file.
It is available on Visual Studio Marketplace
**[Prettify Selected JSON in Visual Studio Marketplace](https://marketplace.visualstudio.com/items?itemName=vthiery.prettify-selected-json)**
Or can be forked directly on github and packaged from the sources.
## Installation
* Install via package manager: hit F1 or "Ctrl + Shift + P", type install and then type Prettify Selected JSON and hit enter.
* Clone the git repository and package it up via [vsce](https://code.visualstudio.com/docs/tools/vscecli) for example.
## License
This extension is under MIT license.
| 3556076e9069dd9a3715d55fb1aa1edd8d1e248d | [
"Markdown",
"TypeScript"
] | 4 | TypeScript | vthiery/vscode-prettify-selected-json | 1aadcfc31dcb7f5dfaa198260987b015d93efb44 | 474abf76d201f9fce934846879db1746237a8303 | |
refs/heads/master | <file_sep>let _ = {};
_.clamp = (number, lower, upper) => {
let clamped = Math.max(number, lower)
clamped = Math.min(clamped, upper)
return clamped;
}
_.inRange = (number, start, end) => {
if(end == null){
end = start;
start = 0;
}
if (start > end) {
let temp = start;
start = end;
end = temp;
}
let inRange = number >= start && number < end;
return inRange;
}
_.words = (stringOfWords) => {
let Arr = stringOfWords.split(' ')
return Arr;
}
_.pad = (string, length) => {
let delta = length - string.length;
if (delta > 0){
let space = ' ';
let frontSpace = '';
let front = Math.floor(delta/2);
for (let i = 0; i < front; i++){ //could have used .repeat()
frontSpace = frontSpace+space;
}
let backSpace = '';
let back = Math.ceil(delta/2);
for (let i = 0; i < back; i++){
backSpace = backSpace+space
}
let newString = frontSpace+string+backSpace;
//console.log(`(${newString})`)
return newString;
} else {return string;}
}
_.has = (anObject, aKey) => {
  // Use hasOwnProperty so keys whose values are falsy (0, '', false) still count as present
  return Object.prototype.hasOwnProperty.call(anObject, aKey);
}
_.invert = (object) => {
  // Build a new object whose keys are the original values and whose values are the original keys,
  // without mutating the input object
  let inverted = {};
  for (const key in object) {
    inverted[object[key]] = key;
  }
  return inverted;
}
_.findKey = (object, func) => {
let store;
for (const key in object){
if (func(object[key])){
store = key; //stores key
break;
}
}
if (!store){return undefined} else {
return store;
}
}
_.drop = function (arr, num) {
let newArr = arr;
while (num > 0){
newArr.shift()
num -=1;
}
if (num === undefined){newArr.shift()}
return newArr;
}
_.dropWhile = function (arr, func) {
  // Walk from the front while the predicate stays truthy, then return the remaining elements
  let i = 0;
  while (i < arr.length && func(arr[i], i, arr)) {
    i += 1;
  }
  return arr.slice(i);
}
_.chunk = function (arr, n){ //n is size of chunks
if (n === undefined){n=1}; //default size if none chosen
let newArray = []; //array to take chunks
let length = arr.length;
let chunks = Math.floor(length/n); //the length of new array
for(let i = 0; i < chunks; i++){
newArray[i] = arr.slice(i*n, i*n+n)
}
let remain = length%n;
if (remain != 0){ // the last chunk will be length of this value
newArray[chunks] = arr.slice(chunks*n, chunks*n+remain)
}
return newArray;
}
// Do not write or modify code below this line.
module.exports = _;
| fbf24055dc70f9bc00e13853d88cdf12879c4a8d | [
"JavaScript"
] | 1 | JavaScript | hobbitronics/lodash | 3d4e5e4b8a121d7fb7cb89520898b180bbe4ab36 | ffdb4b0666f03f3b84bd861deaa1ec743cfa6698 | |
refs/heads/master | <file_sep>import tornado.ioloop
import tornado.web
import tornado.log
import os
import boto3
client = boto3.client(
'ses',
region_name="us-east-1",
aws_access_key_id=os.environ.get('AWS_ACCESS_KEY'),
aws_secret_access_key=os.environ.get('AWS_SECRET_KEY')
)
from jinja2 import \
Environment, PackageLoader, select_autoescape
ENV = Environment(
loader=PackageLoader('myapp', 'templates'),
autoescape=select_autoescape(['html', 'xml'])
)
class TemplateHandler(tornado.web.RequestHandler):
def render_template (self, tpl, context):
template = ENV.get_template(tpl)
self.write(template.render(**context))
class MainHandler(TemplateHandler):
def get(self):
self.set_header(
'Cache-Control',
'no-store, no-cache, must-revalidate, max-age=0')
name = self.get_query_argument("name", "Nobody")
amount = self.get_query_argument("amount", "0")
amount = float(amount)
amount = amount * 1.15
context = {
"name" : name,
"users" : ["Sam", "mittens", "Chih-Ming"],
"amount" : amount
}
self.render_template("hello.html", context)
class PageHandler(TemplateHandler):
def get(self, page):
page = page + ".html"
self.set_header(
'Cache-Control',
'no-store, no-cache, must-revalidate, max-age=0')
self.render_template(page, {})
def send_email (email, comments):
response = client.send_email(
Destination={
'ToAddresses': ['<EMAIL>'],
},
Message={
'Body': {
'Text': {
'Charset': 'UTF-8',
'Data': '{} wants to talk to you\n\n{}'.format(email, comments),
},
},
'Subject': {'Charset': 'UTF-8', 'Data': 'Test email'},
},
Source='<EMAIL>',
)
class Form1Handler(TemplateHandler):
def get(self):
self.set_header(
'Cache-Control',
'no-store, no-cache, must-revalidate, max-age=0')
self.render_template("form1.html", {})
class FormHandler(TemplateHandler):
def get(self):
search = self.get_query_argument('query', None)
print(search)
# do a look up in my database
self.set_header(
'Cache-Control',
'no-store, no-cache, must-revalidate, max-age=0')
self.render_template("form.html", {})
def post(self):
email = self.get_body_argument('email', None)
comments = self.get_body_argument("comments", None)
error = ""
if email:
print("EMAIL:", email)
send_email(email, comments)
self.redirect("/form-success")
else:
error = "GIVE ME YOUR EMAIL!"
self.set_header(
'Cache-Control',
'no-store, no-cache, must-revalidate, max-age=0')
self.render_template("form.html", {"error": error})
def make_app():
return tornado.web.Application([
(r"/", MainHandler),
(r"/form1", Form1Handler),
(r"/form", FormHandler),
(r"/(page2)", PageHandler),
(r"/(form-success)", PageHandler),
(
r"/static/(.*)",
tornado.web.StaticFileHandler,
{'path': 'static'}
),
], autoreload=True)
if __name__ == "__main__":
tornado.log.enable_pretty_logging()
PORT = os.environ.get('PORT', 8080)
app = make_app()
app.listen(PORT)
tornado.ioloop.IOLoop.current().start() | 0d18ae7b46fc12c2948990b43f0eaff8c4a1b9d8 | [
"Python"
] | 1 | Python | scm2nycotx/tornado-exercise | b5296ba1d63cb7c1d71ea67e856ec4f30db5bc6c | 57e741ea7472ade242bbc5e6dc0c23f34cea6a35 | |
refs/heads/master | <repo_name>tranthaibinh111/MySqlToSqlServer<file_sep>/controllers/__init__.py
from .excel_controller import ExcelController
from .category_controller import CategoryController
from .in_out_product_variable_controller import InOutProductVariableController
from .product_controller import ProductController
from .product_image_controller import ProductImageController
from .product_variable_controller import ProductVariableController
from .product_variable_value_controller import ProductVariableValueController
from .variable_value_controller import VariableValueController
<file_sep>/__main__.py
import os
from controllers import CategoryController, \
InOutProductVariableController, \
ProductController, \
ProductImageController, \
ProductVariableController, \
ProductVariableValueController, \
VariableValueController
if __name__ == "__main__":
root_path = os.getcwd()
# Lấy thông tin các danh mục cần thiết
print("Lấy thông tin các danh mục cần thiết")
category_file = {
"path_full": root_path + "\\import_excel\\20180619_category.xlsx",
"sheet": "category"
}
variable_value_file = {
"path_full": root_path + "\\import_excel\\20180619_variable-value.xlsx",
"sheet": "variable-value"
}
category_controller = CategoryController(category_file["path_full"], category_file["sheet"])
variable_value_controller = VariableValueController(variable_value_file["path_full"], variable_value_file["sheet"])
# Đọc thông tin từ file excel cần import
print("Đọc thông tin từ file excel cần import")
product_file = {
"path_full": root_path + "\\import_excel\\20180619_san-pham.xlsx",
"sheet": "san-pham"
}
product_variable_file = {
"path_full": root_path + "\\import_excel\\20180619_bien-the.xlsx",
"sheet": "bien-the"
}
product_controller = ProductController(product_file["path_full"], product_file["sheet"], category_controller)
product_image_controller = ProductImageController(product_controller)
product_variable_controller = ProductVariableController(product_variable_file["path_full"],
product_variable_file["sheet"],
product_controller)
product_variable_value_controller = ProductVariableValueController(product_variable_controller,
variable_value_controller)
in_out_product_variable_controller = InOutProductVariableController(product_controller,
product_variable_controller,
variable_value_controller)
# Update thông tin table chính từ table phụ
print("Update thông tin table chính từ table phụ")
product_controller.update_product_variable(product_variable_controller.product_parents)
# Xuất file sql import
print("Xuất file sql import")
category_controller.export_sql()
variable_value_controller.export_sql()
product_controller.export_sql()
product_image_controller.export_sql()
product_variable_controller.export_sql()
product_variable_value_controller.export_sql()
in_out_product_variable_controller.export_sql()
print("Ket thuc chuong trinh")<file_sep>/controllers/product_variable_controller.py
import os
from common.sql import CommonSql
from models import ProductVariableEntity
from controllers import ExcelController
class ProductVariableController:
def __init__(self, file_name, sheet_name, product_controller):
self.__file_name = file_name
self.__sheet_name = sheet_name
self.product_variables = []
self.product_parents = []
self.__mapping(product_controller)
def __mapping(self, product_controller):
data = ExcelController.get_data(self.__file_name, self.__sheet_name)
index = 0
for row in data:
index += 1
product_variable = ProductVariableEntity()
product_variable.id = index
product_variable.parent_sku = row[0]
product_variable.sku = row[4]
product_variable.product_id = product_controller.get_product_variable_id(product_variable.parent_sku)
if product_variable.product_id is None:
print(product_variable.parent_sku)
raise Exception("ParentSku khong ton tai trong table tbl_Product")
if row[5] is None:
product_variable.stock = 0
else:
                # The Excel file has formatting issues, so treat the value as a float
if float(str(row[5]).replace("'", "")) < 0:
product_variable.stock = 0
else:
product_variable.stock = row[5]
if product_variable.stock > 0:
product_variable.stock_status = 1
else:
product_variable.stock_status = 0
product_variable.regular_price = row[7]
product_variable.cost_of_good = row[8]
product_variable.image = row[9]
product_variable.color = row[12]
product_variable.size = row[13]
product_variable.retail_price = row[14]
product_variable.minimum_inventory_level = 2
product_variable.maximum_inventory_level = 10
self.product_variables.append(product_variable)
self.__calculator_product(product_variable.parent_sku,
product_variable.stock,
product_variable.regular_price)
    # Aggregate stock and minimum regular price per parent SKU for the product table
def __calculator_product(self, parent_sku, stock, regular_price):
check_exist = False
min_regular_price = regular_price
for product_stock in self.product_parents:
if product_stock["parent_sku"] == parent_sku:
check_exist = True
product_stock["stock"] += stock
if product_stock["regular_price"] > min_regular_price:
product_stock["regular_price"] = min_regular_price
if not check_exist:
self.product_parents.append({"parent_sku": parent_sku, "stock": stock, "regular_price": regular_price})
def export_sql(self):
file_name = os.getcwd() + "\\export_sql\\product_variable.sql"
with open(file_name, mode="w+", encoding="utf-8") as wf:
wf.write("SET IDENTITY_INSERT dbo.tbl_ProductVariable ON;\n")
wf.write("\n")
wf.write("DELETE FROM dbo.tbl_ProductVariable;\n")
wf.write("\n")
index = 0
for product_variable in self.product_variables:
index += 1
sql_text = ""
sql_text += "INSERT INTO dbo.tbl_ProductVariable("
sql_text += " ID"
sql_text += ", ProductID"
sql_text += ", ParentSKU"
sql_text += ", SKU"
sql_text += ", Stock"
sql_text += ", StockStatus"
sql_text += ", Regular_Price"
sql_text += ", CostOfGood"
sql_text += ", Image"
sql_text += ", ManageStock"
sql_text += ", IsHidden"
sql_text += ", CreatedDate"
sql_text += ", CreatedBy"
sql_text += ", ModifiedDate"
sql_text += ", ModifiedBy"
sql_text += ", color"
sql_text += ", size"
sql_text += ", RetailPrice"
sql_text += ", MinimumInventoryLevel"
sql_text += ", MaximumInventoryLevel"
sql_text += ", SupplierID"
sql_text += ", SupplierName"
sql_text += ") VALUES("
sql_text += " " + CommonSql.f_str_value(product_variable.id)
sql_text += ", " + CommonSql.f_str_value(product_variable.product_id)
sql_text += ", " + CommonSql.f_str_value(product_variable.parent_sku)
sql_text += ", " + CommonSql.f_str_value(product_variable.sku)
sql_text += ", " + CommonSql.f_str_value(product_variable.stock)
sql_text += ", " + CommonSql.f_str_value(product_variable.stock_status)
sql_text += ", " + CommonSql.f_str_value(product_variable.regular_price)
sql_text += ", " + CommonSql.f_str_value(product_variable.cost_of_good)
sql_text += ", " + CommonSql.f_str_value(product_variable.image)
sql_text += ", " + CommonSql.f_str_value(product_variable.manage_stock)
sql_text += ", " + CommonSql.f_str_value(product_variable.is_hidden)
sql_text += ", " + CommonSql.f_str_value(product_variable.created_date)
sql_text += ", " + CommonSql.f_str_value(product_variable.created_by)
sql_text += ", " + CommonSql.f_str_value(product_variable.modified_date)
sql_text += ", " + CommonSql.f_str_value(product_variable.modified_by)
sql_text += ", " + CommonSql.f_str_value(product_variable.color)
sql_text += ", " + CommonSql.f_str_value(product_variable.size)
sql_text += ", " + CommonSql.f_str_value(product_variable.retail_price)
sql_text += ", " + CommonSql.f_str_value(product_variable.minimum_inventory_level)
sql_text += ", " + CommonSql.f_str_value(product_variable.maximum_inventory_level)
sql_text += ", " + CommonSql.f_str_value(product_variable.supplier_id)
sql_text += ", " + CommonSql.f_str_value(product_variable.supplier_name)
sql_text += ");\n"
wf.write(sql_text)
if index > 100:
wf.write("GO\n")
index = 0
wf.write("SET IDENTITY_INSERT dbo.tbl_ProductVariable OFF;\n")
<file_sep>/controllers/product_variable_value_controller.py
import os
from common.sql import CommonSql
from models import ProductVariableValueEntity
class ProductVariableValueController:
def __init__(self, product_variable_controller, variable_value_controller):
self.product_variable_values = []
self.__mapping(product_variable_controller, variable_value_controller)
def __mapping(self, product_variable_controller, variable_value_controller):
index = 0
for product_variable in product_variable_controller.product_variables:
product_variable_column = []
if product_variable.color is not None and product_variable.size is not None:
product_variable_column = ["Màu", "Size"]
elif product_variable.color is not None:
product_variable_column = ["Màu"]
elif product_variable.size is not None:
product_variable_column = ["Size"]
for column in product_variable_column:
index += 1
product_variable_value = ProductVariableValueEntity()
product_variable_value.id = index
product_variable_value.product_variable_id = product_variable.id
if column == "Màu":
info_variable_value = variable_value_controller.get_variable_value_info(column,
product_variable.color)
else:
info_variable_value = variable_value_controller.get_variable_value_info(column,
product_variable.size)
if info_variable_value is None:
print(column, " ", product_variable.color, " ", product_variable.size)
raise Exception("Color or Size khong ton tai trong table tbl_VariableValue")
product_variable_value.product_variable_sku = product_variable.sku
product_variable_value.variable_value_id = info_variable_value["id"]
product_variable_value.variable_name = info_variable_value["variable_name"]
product_variable_value.variable_value = info_variable_value["variable_value"]
product_variable_value.is_hidden = product_variable.is_hidden
product_variable_value.created_date = product_variable.created_date
product_variable_value.created_by = product_variable.created_by
product_variable_value.modified_date = product_variable.modified_date
product_variable_value.modified_by = product_variable.modified_by
self.product_variable_values.append(product_variable_value)
def export_sql(self):
file_name = os.getcwd() + "\\export_sql\\product_variable_value.sql"
with open(file_name, mode="w+", encoding="utf-8") as wf:
wf.write("SET IDENTITY_INSERT dbo.tbl_ProductVariableValue ON;\n")
wf.write("\n")
wf.write("DELETE FROM dbo.tbl_ProductVariableValue;\n")
wf.write("\n")
index = 0
for product_variable_value in self.product_variable_values:
index += 1
sql_text = ""
sql_text += "INSERT INTO dbo.tbl_ProductVariableValue("
sql_text += " ID"
sql_text += ", ProductVariableID"
sql_text += ", ProductvariableSKU"
sql_text += ", VariableValueID"
sql_text += ", VariableName"
sql_text += ", VariableValue"
sql_text += ", IsHidden"
sql_text += ", CreatedDate"
sql_text += ", CreatedBy"
sql_text += ", ModifiedDate"
sql_text += ", ModifiedBy"
sql_text += ") VALUES("
sql_text += " " + CommonSql.f_str_value(product_variable_value.id)
sql_text += ", " + CommonSql.f_str_value(product_variable_value.product_variable_id)
sql_text += ", " + CommonSql.f_str_value(product_variable_value.product_variable_sku)
sql_text += ", " + CommonSql.f_str_value(product_variable_value.variable_value_id)
sql_text += ", " + CommonSql.f_str_value(product_variable_value.variable_name)
sql_text += ", " + CommonSql.f_str_value(product_variable_value.variable_value)
sql_text += ", " + CommonSql.f_str_value(product_variable_value.is_hidden)
sql_text += ", " + CommonSql.f_str_value(product_variable_value.created_date)
sql_text += ", " + CommonSql.f_str_value(product_variable_value.created_by)
sql_text += ", " + CommonSql.f_str_value(product_variable_value.modified_date)
sql_text += ", " + CommonSql.f_str_value(product_variable_value.modified_by)
sql_text += ");\n"
wf.write(sql_text)
if index > 100:
wf.write("GO\n")
index = 0
wf.write("SET IDENTITY_INSERT dbo.tbl_ProductVariableValue OFF;\n")
<file_sep>/models/product_image_entity.py
from datetime import datetime
class ProductImageEntity:
def __init__(self):
self.id = None
self.product_id = None
self.product_image = None
self.is_hidden = 0
self.created_date = datetime.now()
self.created_by = "admin"
self.modified_date = None
self.modified_by = None
<file_sep>/common/sql/__init__.py
from .common_sql import CommonSql
<file_sep>/models/product_variable_entity.py
from datetime import datetime
class ProductVariableEntity:
def __init__(self):
self.id = None
self.product_id = None
self.parent_sku = None
self.stock = 0
self.stock_status = 0
self.regular_price = 0
self.cost_of_good = 0
self.image = None
self.manage_stock = 1
self.is_hidden = 0
self.created_date = datetime.now()
self.created_by = "admin"
self.modified_date = None
self.modified_by = None
self.color = None
self.size = None
self.retail_price = 0
self.minimum_inventory_level = None
self.maximum_inventory_level = None
self.supplier_id = None
self.supplier_name = None
<file_sep>/models/in_out_product_variable_entity.py
from datetime import datetime
class InOutProductVariableEntity:
def __init__(self):
self.id = None
self.agent_id = 1
self.product_id = None
self.product_variable_id = None
self.product_variable_name = None
self.product_variable_value = None
self.quantity = None
self.quantity_current = 0
self.type = 1
self.is_hidden = 0
self.created_date = datetime.now()
self.created_by = "admin"
self.modified_date = None
self.modified_by = None
self.product_type = None
self.note = None
self.order_id = 0
self.session_in_out_id = 0
self.status = 1
self.product_name = None
self.sku = None
self.product_image = None
self.product_variable = None
self.move_pro_id = 0
self.parent_id = None
<file_sep>/models/__init__.py
from .category_entity import CategoryEntity
from .in_out_product_variable_entity import InOutProductVariableEntity
from .product_entity import ProductEntity
from .product_image_entity import ProductImageEntity
from .product_variable_entity import ProductVariableEntity
from .product_variable_value_entity import ProductVariableValueEntity
from .variable_value_entity import VariableValueEntity
<file_sep>/models/category_entity.py
from datetime import datetime
class CategoryEntity:
def __init__(self):
self.id = None
self.category_name = None
self.category_description = None
self.category_level = None
self.parent_id = None
self.is_hidden = None
self.create_date = datetime.now()
self.create_by = "admin"
self.modified_date = None
self.modified_by = None
<file_sep>/controllers/variable_value_controller.py
import os
from common.sql import CommonSql
from models import VariableValueEntity
from controllers import ExcelController
class VariableValueController:
def __init__(self, file_name, sheet_name):
self.__file_name = file_name
self.__sheet_name = sheet_name
self.variable_values = []
self.__mapping()
def __mapping(self):
data = ExcelController.get_data(self.__file_name, self.__sheet_name)
for row in data:
variable_value = VariableValueEntity()
variable_value.id = row[0]
variable_value.variable_id = row[1]
variable_value.variable_name = row[2]
variable_value.variable_value = row[3]
variable_value.variable_value_text = row[9]
variable_value.sku_text = row[10]
self.variable_values.append(variable_value)
def get_variable_value_info(self, variable_name, variable_value_text):
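        # Case-insensitive lookup by variable name (e.g. "Màu", "Size") and display text.
        # Returns a dict with id / variable_name / variable_value / sku_text, or None if not found.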
result = None
for variable_value in self.variable_values:
if variable_value.variable_name.upper() == variable_name.upper() \
and str(variable_value.variable_value_text).upper() == str(variable_value_text).upper():
result = {
'id': variable_value.id,
'variable_name': variable_value.variable_name,
'variable_value': variable_value.variable_value,
'sku_text': variable_value.sku_text
}
break
return result
def export_sql(self):
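        # Writes an INSERT script for dbo.tbl_VariableValue, wrapped in SET IDENTITY_INSERT ON/OFF
        # and split into batches with a GO roughly every 100 statements.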
file_name = os.getcwd() + "\\export_sql\\variable_value.sql"
with open(file_name, mode="w", encoding="utf-8") as wf:
wf.write("SET IDENTITY_INSERT dbo.tbl_VariableValue ON;\n")
wf.write("\n")
wf.write("DELETE FROM dbo.tbl_VariableValue;\n")
wf.write("\n")
index = 0
for variable_value in self.variable_values:
index += 1
sql_text = ""
sql_text += "INSERT INTO dbo.tbl_VariableValue("
sql_text += " ID"
sql_text += ", VariableID"
sql_text += ", VariableName"
sql_text += ", VariableValue"
sql_text += ", IsHidden"
sql_text += ", CreatedDate"
sql_text += ", CreatedBy"
sql_text += ", ModifiedDate"
sql_text += ", ModifiedBy"
sql_text += ", VariableValueText"
sql_text += ", SKUText"
sql_text += ") VALUES("
sql_text += " " + CommonSql.f_str_value(variable_value.id)
sql_text += ", " + CommonSql.f_str_value(variable_value.variable_id)
sql_text += ", " + CommonSql.f_str_value(variable_value.variable_name)
sql_text += ", " + CommonSql.f_str_value(variable_value.variable_value)
sql_text += ", " + CommonSql.f_str_value(variable_value.is_hidden)
sql_text += ", " + CommonSql.f_str_value(variable_value.create_date)
sql_text += ", " + CommonSql.f_str_value(variable_value.create_by)
sql_text += ", " + CommonSql.f_str_value(variable_value.modified_date)
sql_text += ", " + CommonSql.f_str_value(variable_value.modified_by)
sql_text += ", " + CommonSql.f_str_value(variable_value.variable_value_text)
sql_text += ", " + CommonSql.f_str_value(variable_value.sku_text)
sql_text += ");\n"
wf.write(sql_text)
if index > 100:
wf.write("GO\n")
index = 0
wf.write("SET IDENTITY_INSERT dbo.tbl_VariableValue OFF;\n")
<file_sep>/controllers/in_out_product_variable_controller.py
import os
from common.sql import CommonSql
from models import InOutProductVariableEntity
class InOutProductVariableController:
def __init__(self, product_controller, product_variable_controller, variable_value_controller):
self.in_out_product_variable = []
self.__mapping(product_controller, product_variable_controller, variable_value_controller)
def __mapping(self, product_controller, product_variable_controller, variable_value_controller):
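        # Builds one dbo.tbl_StockManager row per simple product and one per product variable,
        # composing the variable name/value as "Màu|Size" style pairs looked up from the Excel data.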
index = 0
for data in product_controller.product_simples:
index += 1
product = InOutProductVariableEntity()
product.id = index
product.product_id = data.id
product.product_variable_id = 0
product.product_variable_name = None
product.product_variable_value = None
product.quantity = data.product_stock
product.product_type = 1
product.sku = data.product_sku
product.product_variable = None
product.parent_id = data.id
self.in_out_product_variable.append(product)
for data in product_variable_controller.product_variables:
index += 1
product = InOutProductVariableEntity()
product.id = index
product.product_id = 0
product.product_variable_id = data.id
if data.color is not None and data.size:
product_variable_name = "Màu|Size"
color = variable_value_controller.get_variable_value_info("Màu", data.color)
size = variable_value_controller.get_variable_value_info("Size", data.size)
product_variable_value = color["variable_value"] + "|" + str(size["variable_value"])
else:
if data.color is not None:
product_variable_name = "Màu|"
color = variable_value_controller.get_variable_value_info("Màu", data.color)
product_variable_value = color["variable_value"] + "|"
elif data.size is not None:
product_variable_name = "Size|"
size = variable_value_controller.get_variable_value_info("Size", data.size)
product_variable_value = str(size["variable_value"]) + "|"
else:
product_variable_name = None
product_variable_value = None
product.product_variable_name = product_variable_name
product.product_variable_value = product_variable_value
product.quantity = data.stock
product.product_type = 2
product.sku = data.sku
product.product_variable = product_variable_value
product.parent_id = data.product_id
self.in_out_product_variable.append(product)
def export_sql(self):
file_name = os.getcwd() + "\\export_sql\\in_out_product_variable.sql"
with open(file_name, mode="w+", encoding="utf-8") as wf:
wf.write("SET IDENTITY_INSERT dbo.tbl_StockManager ON;\n")
wf.write("\n")
wf.write("DELETE FROM dbo.tbl_StockManager;\n")
wf.write("\n")
index = 0
for product in self.in_out_product_variable:
index += 1
sql_text = ""
sql_text += "INSERT INTO dbo.tbl_StockManager("
sql_text += " ID"
sql_text += ", AgentID"
sql_text += ", ProductID"
sql_text += ", ProductVariableID"
# sql_text += ", ProductVariableName"
# sql_text += ", ProductVariableValue"
sql_text += ", Quantity"
sql_text += ", QuantityCurrent"
sql_text += ", Type"
# sql_text += ", IsHidden"
sql_text += ", CreatedDate"
sql_text += ", CreatedBy"
sql_text += ", ModifiedDate"
sql_text += ", ModifiedBy"
# sql_text += ", ProductType"
sql_text += ", NoteID"
sql_text += ", OrderID"
# sql_text += ", SessionInOutID"
sql_text += ", Status"
# sql_text += ", ProductName"
sql_text += ", SKU"
# sql_text += ", ProductImage"
# sql_text += ", ProductVariable"
sql_text += ", MoveProID"
sql_text += ", ParentID"
sql_text += ") VALUES("
sql_text += " " + CommonSql.f_str_value(product.id)
sql_text += ", " + CommonSql.f_str_value(product.agent_id)
sql_text += ", " + CommonSql.f_str_value(product.product_id)
sql_text += ", " + CommonSql.f_str_value(product.product_variable_id)
# sql_text += ", " + CommonSql.f_str_value(product.product_variable_name)
# sql_text += ", " + CommonSql.f_str_value(product.product_variable_value)
sql_text += ", " + CommonSql.f_str_value(product.quantity)
sql_text += ", " + CommonSql.f_str_value(product.quantity_current)
sql_text += ", " + CommonSql.f_str_value(product.type)
# sql_text += ", " + CommonSql.f_str_value(product.is_hidden)
sql_text += ", " + CommonSql.f_str_value(product.created_date)
sql_text += ", " + CommonSql.f_str_value(product.created_by)
sql_text += ", " + CommonSql.f_str_value(product.modified_date)
sql_text += ", " + CommonSql.f_str_value(product.modified_by)
# sql_text += ", " + CommonSql.f_str_value(product.product_type)
sql_text += ", " + CommonSql.f_str_value(product.note)
sql_text += ", " + CommonSql.f_str_value(product.order_id)
# sql_text += ", " + CommonSql.f_str_value(product.session_in_out_id)
sql_text += ", " + CommonSql.f_str_value(product.status)
# sql_text += ", " + CommonSql.f_str_value(product.product_name)
sql_text += ", " + CommonSql.f_str_value(product.sku)
# sql_text += ", " + CommonSql.f_str_value(product.product_image)
# sql_text += ", " + CommonSql.f_str_value(product.product_variable)
sql_text += ", " + CommonSql.f_str_value(product.move_pro_id)
sql_text += ", " + CommonSql.f_str_value(product.parent_id)
sql_text += ");\n"
wf.write(sql_text)
if index > 100:
wf.write("GO\n")
index = 0
            wf.write("SET IDENTITY_INSERT dbo.tbl_StockManager OFF;\n")<file_sep>/common/sql/common_sql.py
from datetime import datetime
class CommonSql:
@staticmethod
def f_str_value(parameter):
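        # Formats a Python value as a SQL Server literal:
        # None -> NULL, str -> N'...', int/float -> the plain number, datetime -> CAST('...' AS DATETIME2).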
result = None
if parameter is None:
result = "NULL"
else:
if type(parameter) is str:
result = "N'" + parameter + "'"
elif type(parameter) is int:
result = str(parameter)
elif type(parameter) is float:
result = str(parameter)
elif type(parameter) is datetime:
result = "CAST('" + str(parameter) + "' AS DATETIME2)"
else:
result = None
if result is None:
print(str(parameter) + ": " + str(type(parameter)))
            raise Exception("Parameter type is not handled by CommonSql.f_str_value")
else:
return result
<file_sep>/controllers/product_controller.py
import os
from common.sql import CommonSql
from models import ProductEntity
from controllers import ExcelController
class ProductController:
def __init__(self, file_name, sheet_name, category_controller):
self.__file_name = file_name
self.__sheet_name = sheet_name
self.product_simples = []
self.product_variables = []
self.__mapping(category_controller)
def __mapping(self, category_controller):
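        # Maps each Excel row to a ProductEntity, resolving the deepest category name to its ID and
        # splitting the rows into simple products and variable (parent) products based on column 13.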
data = ExcelController.get_data(self.__file_name, self.__sheet_name)
index = 0
for row in data:
index += 1
product = ProductEntity()
product.id = index
if row[14] is not None:
categories = row[14].split(" > ")
category_name = categories[len(categories) - 1]
product.category_id = category_controller.get_category_id(category_name)
if product.category_id is None:
print(category_name)
                    raise Exception("CategoryName does not exist in table tbl_Category")
product.product_title = row[0]
product.product_content = row[3]
product.product_sku = row[5]
if row[13] is not None and row[13].upper() == "VARIABLE":
product.product_stock = 0
product.manage_stock = 0
product.product_style = 2
else:
if row[6] is not None:
if int(str(row[6]).replace("'", "")) > 0:
                        product.product_stock = int(str(row[6]).replace("'", ""))
else:
product.product_stock = 0
if product.product_stock > 0:
# in stock
product.stock_status = 1
else:
# out stock
product.stock_status = 0
product.manage_stock = 1
product.regular_price = row[9]
product.product_style = 1
product.minimum_inventory_level = 2
product.maximum_inventory_level = 10
product.cost_of_good = row[11]
product.retail_price = row[10]
if row[12] is not None:
image = row[12].split(" | ")
product.product_images = image
product.product_image = product.product_images[0]
if product.manage_stock == 1:
# simple
self.product_simples.append(product)
else:
# variable
self.product_variables.append(product)
def get_product_list(self):
return self.product_simples + self.product_variables
def get_product_variable_id(self, product_sku):
result = None
for product in self.product_variables:
if product.product_sku.upper() == product_sku.upper():
result = product.id
break
return result
def update_product_variable(self, product_parents):
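        # Copies the aggregated regular price and stock status from the grouped variable rows
        # onto the matching parent (variable) products.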
for product in self.product_variables:
for group in product_parents:
if product.product_sku == group["parent_sku"]:
product.regular_price = group["regular_price"]
if group["stock"] > 0:
# in stock
product.stock_status = 1
else:
# out stock
product.stock_status = 0
def export_sql(self):
file_name = os.getcwd() + "\\export_sql\\product.sql"
with open(file_name, mode="w", encoding="utf-8") as wf:
wf.write("SET IDENTITY_INSERT dbo.tbl_Product ON;\n")
wf.write("\n")
wf.write("DELETE FROM dbo.tbl_Product;\n")
wf.write("\n")
index = 0
for product in self.get_product_list():
index += 1
if product.product_title is not None:
product.product_title = product.product_title.replace("'", "''")
if product.product_content is not None:
product.product_content = product.product_content.replace("\n", "").replace("'", "''")
sql_text = ""
sql_text += "INSERT INTO dbo.tbl_Product("
sql_text += " ID"
sql_text += ", CategoryID"
sql_text += ", ProductOldID"
sql_text += ", ProductTitle"
sql_text += ", ProductContent"
sql_text += ", ProductSKU"
sql_text += ", ProductStock"
sql_text += ", StockStatus"
sql_text += ", ManageStock"
sql_text += ", Regular_Price"
sql_text += ", CostOfGood"
sql_text += ", Retail_Price"
sql_text += ", ProductImage"
sql_text += ", ProductType"
sql_text += ", IsHidden"
sql_text += ", CreatedDate"
sql_text += ", CreatedBy"
sql_text += ", ModifiedDate"
sql_text += ", ModifiedBy"
sql_text += ", Materials"
sql_text += ", MinimumInventoryLevel"
sql_text += ", MaximumInventoryLevel"
sql_text += ", SupplierID"
sql_text += ", SupplierName"
sql_text += ", ProductStyle"
sql_text += ") VALUES("
sql_text += " " + CommonSql.f_str_value(product.id)
sql_text += ", " + CommonSql.f_str_value(product.category_id)
sql_text += ", " + CommonSql.f_str_value(product.product_old_id)
sql_text += ", " + CommonSql.f_str_value(product.product_title)
sql_text += ", " + CommonSql.f_str_value(product.product_content)
sql_text += ", " + CommonSql.f_str_value(product.product_sku)
sql_text += ", " + CommonSql.f_str_value(product.product_stock)
sql_text += ", " + CommonSql.f_str_value(product.stock_status)
sql_text += ", " + CommonSql.f_str_value(product.manage_stock)
sql_text += ", " + CommonSql.f_str_value(product.regular_price)
sql_text += ", " + CommonSql.f_str_value(product.cost_of_good)
sql_text += ", " + CommonSql.f_str_value(product.retail_price)
sql_text += ", " + CommonSql.f_str_value(product.product_image)
sql_text += ", " + CommonSql.f_str_value(product.product_type)
sql_text += ", " + CommonSql.f_str_value(product.is_hidden)
sql_text += ", " + CommonSql.f_str_value(product.created_date)
sql_text += ", " + CommonSql.f_str_value(product.created_by)
sql_text += ", " + CommonSql.f_str_value(product.modified_date)
sql_text += ", " + CommonSql.f_str_value(product.modified_by)
sql_text += ", " + CommonSql.f_str_value(product.materials)
sql_text += ", " + CommonSql.f_str_value(product.minimum_inventory_level)
sql_text += ", " + CommonSql.f_str_value(product.maximum_inventory_level)
sql_text += ", " + CommonSql.f_str_value(product.supplier_id)
sql_text += ", " + CommonSql.f_str_value(product.supplier_name)
sql_text += ", " + CommonSql.f_str_value(product.product_style)
sql_text += ");\n"
wf.write(sql_text)
if index > 100:
wf.write("GO\n")
index = 0
wf.write("SET IDENTITY_INSERT dbo.tbl_Product OFF;\n")
<file_sep>/controllers/excel_controller.py
from openpyxl import load_workbook
class ExcelController:
def __init__(self):
        raise Exception("Class ExcelController cannot be instantiated")
@staticmethod
def get_data(file_name, sheet_name):
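        # Reads every row of the given worksheet (skipping the first, title row) and returns a list
        # of row-value lists; on failure the error is printed and None is returned.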
if file_name == '':
            raise Exception("No Excel file was specified")
try:
wb = load_workbook(file_name, read_only=True)
ws = wb[sheet_name]
index = 0
data = []
for row in ws.rows:
index += 1
                # Skip the title (header) row
if index == 1:
continue
data_row = []
for cell in row:
data_row.append(cell.value)
data.append(data_row)
return data
except Exception as e:
print(e)
<file_sep>/controllers/product_image_controller.py
import os
from common.sql import CommonSql
from models import ProductImageEntity
class ProductImageController:
def __init__(self, product_controller):
self.product_images = []
self.__mapping(product_controller)
def __mapping(self, product_controller):
index = 0
for product in product_controller.get_product_list():
if product.product_images is not None:
for image in product.product_images:
index += 1
product_image = ProductImageEntity()
product_image.id = index
product_image.product_id = product.id
product_image.product_image = image
product_image.is_hidden = product.is_hidden
product_image.created_date = product.created_date
product_image.created_by = product.created_by
product_image.modified_date = product.modified_date
product_image.modified_by = product.modified_by
self.product_images.append(product_image)
def export_sql(self):
file_name = os.getcwd() + "\\export_sql\\product_image.sql"
with open(file_name, mode="w+", encoding="utf-8") as wf:
wf.write("SET IDENTITY_INSERT dbo.tbl_ProductImage ON;\n")
wf.write("\n")
wf.write("DELETE FROM dbo.tbl_ProductImage;\n")
wf.write("\n")
index = 0
for product_image in self.product_images:
index += 1
sql_text = ""
sql_text += "INSERT INTO dbo.tbl_ProductImage("
sql_text += " ID"
sql_text += ", ProductID"
sql_text += ", ProductImage"
sql_text += ", IsHidden"
sql_text += ", CreatedDate"
sql_text += ", CreatedBy"
sql_text += ", ModifiedDate"
sql_text += ", ModifiedBy"
sql_text += ") VALUES("
sql_text += " " + CommonSql.f_str_value(product_image.id)
sql_text += ", " + CommonSql.f_str_value(product_image.product_id)
sql_text += ", " + CommonSql.f_str_value(product_image.product_image)
sql_text += ", " + CommonSql.f_str_value(product_image.is_hidden)
sql_text += ", " + CommonSql.f_str_value(product_image.created_date)
sql_text += ", " + CommonSql.f_str_value(product_image.created_by)
sql_text += ", " + CommonSql.f_str_value(product_image.modified_date)
sql_text += ", " + CommonSql.f_str_value(product_image.modified_by)
sql_text += ");\n"
wf.write(sql_text)
if index > 100:
wf.write("GO\n")
index = 0
wf.write("SET IDENTITY_INSERT dbo.tbl_ProductImage OFF;\n")
<file_sep>/models/product_entity.py
from datetime import datetime
class ProductEntity:
def __init__(self):
self.id = None
self.category_id = None
self.product_old_id = None
self.product_title = None
self.product_content = None
self.product_sku = None
self.product_stock = 0
self.stock_status = 0
self.manage_stock = 1
self.regular_price = 0
self.cost_of_good = 0
self.retail_price = 0
self.product_image = None
self.product_type = 0
self.is_hidden = 0
self.created_date = datetime.now()
self.created_by = "admin"
self.modified_date = None
self.modified_by = None
self.materials = None
self.minimum_inventory_level = None
self.maximum_inventory_level = None
self.supplier_id = None
self.supplier_name = None
self.product_style = None
        # Used when inserting into the ProductImage table
self.product_images = None<file_sep>/controllers/category_controller.py
import os
from common.sql import CommonSql
from controllers import ExcelController
from models import CategoryEntity
class CategoryController:
def __init__(self, file_name, sheet_name):
self.__file_name = file_name
self.__sheet_name = sheet_name
self.categories = []
self.__mapping()
def __mapping(self):
data = ExcelController.get_data(self.__file_name, self.__sheet_name)
for row in data:
category = CategoryEntity()
category.id = row[0]
category.category_name = row[1]
category.category_description = row[2]
category.category_level = row[3]
category.parent_id = row[4]
category.is_hidden = row[5]
self.categories.append(category)
def get_category_id(self, category_name):
result = None
for category in self.categories:
if category.category_name.upper() == category_name.upper():
result = category.id
break
return result
def export_sql(self):
file_name = os.getcwd() + "\\export_sql\\category.sql"
with open(file_name, mode="w", encoding="utf-8") as wf:
wf.write("SET IDENTITY_INSERT dbo.tbl_Category ON;\n")
wf.write("\n")
wf.write("DELETE FROM dbo.tbl_Category;\n")
wf.write("\n")
index = 0
for category in self.categories:
index += 1
sql_text = ""
sql_text += "INSERT INTO dbo.tbl_Category("
sql_text += " ID"
sql_text += ", CategoryName"
sql_text += ", CategoryDescription"
sql_text += ", CategoryLevel"
sql_text += ", ParentID"
sql_text += ", IsHidden"
sql_text += ", CreatedDate"
sql_text += ", CreatedBy"
sql_text += ", ModifiedDate"
sql_text += ", ModifiedBy"
sql_text += ") VALUES("
sql_text += " " + CommonSql.f_str_value(category.id)
sql_text += ", " + CommonSql.f_str_value(category.category_name)
sql_text += ", " + CommonSql.f_str_value(category.category_description)
sql_text += ", " + CommonSql.f_str_value(category.category_level)
sql_text += ", " + CommonSql.f_str_value(category.parent_id)
sql_text += ", " + CommonSql.f_str_value(category.is_hidden)
sql_text += ", " + CommonSql.f_str_value(category.create_date)
sql_text += ", " + CommonSql.f_str_value(category.create_by)
sql_text += ", " + CommonSql.f_str_value(category.modified_date)
sql_text += ", " + CommonSql.f_str_value(category.modified_by)
sql_text += ");\n"
wf.write(sql_text)
if index > 100:
wf.write("GO\n")
index = 0
wf.write("SET IDENTITY_INSERT dbo.tbl_Category OFF;\n")
<file_sep>/models/product_variable_value_entity.py
from datetime import datetime
class ProductVariableValueEntity:
def __init__(self):
self.id = None
self.product_variable_id = None
self.product_variable_sku = None
self.variable_value_id = None
self.variable_name = None
self.variable_value = None
self.is_hidden = 0
self.created_date = datetime.now()
self.created_by = "admin"
self.modified_date = None
self.modified_by = None
<file_sep>/models/variable_value_entity.py
from datetime import datetime
class VariableValueEntity:
def __init__(self):
self.id = None
self.variable_id = None
self.variable_name = None
self.variable_value = None
self.is_hidden = 0
self.create_date = datetime.now()
self.create_by = "admin"
self.modified_date = None
self.modified_by = None
self.variable_value_text = None
self.sku_text = None
| 92d87e118f7dd341955095669ea019f972d9d769 | [
"Python"
] | 20 | Python | tranthaibinh111/MySqlToSqlServer | e7185891ac01c340f15b13e4ec064da0cce1b71d | 9d068daf29e301b99c33d40a7c2b8d7f26288243 | |
refs/heads/master | <repo_name>gotoeveryone/timoney<file_sep>/app/controllers/app.go
package controllers
import (
"timoney/app/forms"
"timoney/app/models"
"github.com/revel/revel"
)
// App is the base controller
type App struct {
*revel.Controller
models.Transactional
}
// Index renders the login page
func (c App) Index() revel.Result {
form := forms.LoginForm{}
return c.Render(form)
}
// Login handles a login request
func (c App) Login(form forms.LoginForm) revel.Result {
revel.INFO.Println(form)
form.Validate(c.Validation, c.Request.Locale)
if c.Validation.HasErrors() {
c.Validation.Keep()
c.FlashParams()
return c.Redirect(App.Index)
}
	// Login succeeded
return c.Redirect(Tradings.Index)
}
<file_sep>/app/models/app.go
package models
import "time"
// Base is the base model
type Base struct {
ID uint `gorm:"primary_key" json:"id"`
}
// TimestampBase is the base model with created/modified timestamps
type TimestampBase struct {
Base
CreatedAt time.Time `gorm:"column:created;type:datetime"`
UpdatedAt time.Time `gorm:"column:modified;type:datetime"`
}
// Account is an account (account title)
type Account struct {
Base
Name string `json:"name"`
}
// Trading is a transaction
type Trading struct {
TimestampBase
AccountID int `json:"-"`
Account *Account `gorm:"ForeignKey:AccountID" json:"account"`
	Traded *time.Time `gorm:"type:date" json:"traded"`
Name *string `json:"name"`
Means *string `json:"means"`
	PaymentDueDate *time.Time `gorm:"type:date" json:"paymentDueDate"`
Summary *string `json:"summary"`
Suppliers *string `json:"suppliers"`
Payment int `json:"payment"`
DistributionRatios *int8 `json:"distributionRatio"`
}
// TradingMean is a payment method
type TradingMean struct {
Base
Name string `json:"name"`
}
// FavoriteTrading is a frequently used transaction preset
type FavoriteTrading struct {
TimestampBase
Keyword string
AccountID int `json:"-"`
TradingMeanID int `json:"-"`
Summary *string
Suppliers *string
Payment *int
DistributionRatios *int8
Created *time.Time
Modified *time.Time
Account Account `gorm:"ForeignKey:AccountID" json:"account"`
TradingMean TradingMean `gorm:"ForeignKey:TradingMeanId" json:"trading"`
}
<file_sep>/app/views/Tradings/Index.html
{{set . "title" "取引一覧"}}
{{template "header.html" .}}
<form action="{{ url "Tradings.Save" }}" method="POST">
{{template "flash.html" .}}
<!-- <input type="hidden" name="csrf_token" value="{{ ._csrftoken }}" /> -->
{{ with $field := field "form.Name" .}}
<label>名前</label><input type="text" name="{{ $field.Name }}" value="あああああ">
{{ end }}
{{ with $field := field "form.AccountID" .}}
<label>勘定科目</label><input type="number" name="{{ $field.Name }}" value="1">
{{ end }}
{{ with $field := field "form.Traded" .}}
<label>取引日</label><input type="text" name="{{ $field.Name }}" value="2017/7/10">
{{ end }}
{{ with $field := field "form.Payment" .}}
<label>金額</label><input type="number" name="{{ $field.Name }}" value="100">
{{ end }}
<button>登録</button>
</form>
{{range .tradings}}
<div>{{.Traded}} - {{firstof .Name "" }} - {{.Account.Name}}</div>
{{end}}
{{template "footer.html" .}}
<file_sep>/app/controllers/trading.go
package controllers
import (
"time"
"timoney/app/forms"
"timoney/app/models"
"github.com/revel/revel"
)
// Tradings is the transactions controller
type Tradings struct {
*revel.Controller
models.Transactional
}
// API returns the transaction list as JSON
func (c Tradings) API() revel.Result {
tradings := []models.Trading{}
c.Txn.Preload("Account").Find(&tradings)
return c.RenderJSON(tradings)
}
// Index renders the transaction list page
func (c Tradings) Index() revel.Result {
tradings := []models.Trading{}
c.Txn.Preload("Account").Find(&tradings)
form := forms.TradingForm{}
return c.Render(form, tradings)
}
// Save validates and stores a new transaction
func (c Tradings) Save(form forms.TradingForm) revel.Result {
revel.INFO.Println(form)
	// Validation errors
form.Validate(c.Validation, c.Request.Locale)
if c.Validation.HasErrors() {
c.Validation.Keep()
c.FlashParams()
return c.Redirect(Tradings.Index)
}
	// The trading date could not be parsed
traded, err := time.Parse("2006/1/2", form.Traded)
if err != nil {
c.Validation.Error("日付が不正です。").Key("tradings.form.Traded.invalid")
c.Validation.Keep()
c.FlashParams()
return c.Redirect(Tradings.Index)
}
trading := models.Trading{
Name: &form.Name,
AccountID: form.AccountID,
Traded: &traded,
Payment: form.Payment,
}
if err := c.Txn.Create(&trading).Error; err != nil {
return c.RenderError(err)
}
return c.Redirect(Tradings.Index)
}
<file_sep>/README.md
# Timoney [ティモニー]
## Time is money
A tool for managing accounting information.
It is built with Revel, a Golang web framework.
## Prerequisites
The following must be installed:
- Golang
## Setup
- Get the Revel command
```sh
$ go get -u github.com/revel/cmd/revel
```
- Get dep
```sh
$ go get -u github.com/golang/dep/cmd/dep
```
- Run dep
```sh
$ dep ensure
```
## Run
```sh
$ revel run timoney
```
<file_sep>/app/forms/app.go
package forms
import "github.com/revel/revel"
// LoginForm is the login form
type LoginForm struct {
Account string
Password string
}
// Validate validates the login form
func (c LoginForm) Validate(v *revel.Validation, locale string) {
v.Check(c.Account, revel.ValidRequired(), revel.ValidMinSize(6))
v.Check(c.Password, revel.ValidRequired(), revel.ValidMinSize(6))
}
// TradingForm is the transaction input form
type TradingForm struct {
AccountID int
Traded string
Name string
Means string
PaymentDueDate string
Summary string
Suppliers string
Payment int
DistributionRatios int8
}
// Validate validates the transaction form
func (c TradingForm) Validate(v *revel.Validation, locale string) {
v.Check(c.AccountID, revel.ValidRequired())
v.Check(c.Traded, revel.ValidRequired(), revel.ValidMinSize(8), revel.ValidMaxSize(10))
v.Check(c.Payment, revel.ValidRequired(), revel.ValidRange(1, 9999999))
}
<file_sep>/app/models/config.go
package models
import (
"encoding/json"
"fmt"
"io/ioutil"
"github.com/revel/revel"
)
type (
	// AppConfig holds the application settings
AppConfig struct {
Redis redis `json:"redis"`
DB db `json:"db"`
Mail mail `json:"mail"`
}
	// Redis connection settings
redis struct {
Host string `json:"host"`
Port int `json:"port"`
Auth string `json:"auth"`
}
	// Database connection settings
db struct {
Name string `json:"name"`
Host string `json:"host"`
Port int `json:"port"`
User string `json:"user"`
		Password string `json:"password"`
}
	// Mail (SMTP) connection settings
mail struct {
SMTP string `json:"smtp"`
Port int `json:"port"`
User string `json:"user"`
		Password string `json:"password"`
From string `json:"from"`
FromAlias string `json:"fromAlias"`
To []string `json:"to"`
}
)
// LoadConfig loads the application settings from a JSON file (config.json in the app base path)
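// The file is expected to contain the keys mapped below, roughly (a sketch; values are placeholders):
//   {"redis": {"host": "...", "port": 6379, "auth": "..."},
//    "db":    {"name": "...", "host": "...", "port": 3306, "user": "...", "password": "..."},
//    "mail":  {"smtp": "...", "port": 587, "user": "...", "password": "...", "from": "...", "fromAlias": "...", "to": ["..."]}}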
func LoadConfig(config *AppConfig) {
jsonValue, err := ioutil.ReadFile(fmt.Sprintf("%s/config.json", revel.BasePath))
if err != nil {
revel.ERROR.Fatalln(err)
}
if err := json.Unmarshal(jsonValue, &config); err != nil {
revel.ERROR.Fatalln(err)
}
}
<file_sep>/app/models/gorm.go
package models
import (
"database/sql"
"fmt"
_ "github.com/go-sql-driver/mysql"
"github.com/jinzhu/gorm"
"github.com/revel/revel"
)
var (
	// Database connection instance
dbManager *gorm.DB
)
// InitDB initialises the database connection
func InitDB() {
	// Read the configuration file
config := AppConfig{}
LoadConfig(&config)
revel.INFO.Println(config)
var err error
dsn := fmt.Sprintf("%s:%s@tcp(%s:%d)/%s?charset=utf8&parseTime=True&loc=%s",
config.DB.User,
config.DB.Password,
config.DB.Host,
config.DB.Port,
config.DB.Name,
"Asia%2FTokyo",
)
dbManager, err = gorm.Open("mysql", dsn)
if err != nil {
panic(err)
}
dbManager.LogMode(true)
revel.INFO.Println("Connected to database")
}
// Transactional wraps a database transaction
type Transactional struct {
*revel.Controller
Txn *gorm.DB
}
// Begin starts a transaction
func (c *Transactional) Begin() revel.Result {
	revel.TRACE.Println("Beginning a transaction.")
tx := dbManager.Begin()
if err := tx.Error; err != nil {
panic(err)
}
c.Txn = tx
return nil
}
// Commit commits the transaction
func (c *Transactional) Commit() revel.Result {
if c.Txn == nil {
return nil
}
c.Txn.Commit()
if err := c.Txn.Error; err != nil && err != sql.ErrTxDone {
panic(err)
}
	revel.TRACE.Printf("Transaction committed!")
c.Txn = nil
return nil
}
// Rollback rolls back the transaction
func (c *Transactional) Rollback() revel.Result {
if c.Txn == nil {
return nil
}
c.Txn.Rollback()
if err := c.Txn.Error; err != nil && err != sql.ErrTxDone {
panic(err)
}
	revel.ERROR.Printf("Transaction rolled back.")
c.Txn = nil
return nil
}
| f26c6463fadcc27899331f50f92c3dc9c8704aa9 | [
"Markdown",
"Go",
"HTML"
] | 8 | Go | gotoeveryone/timoney | 520073052169dda4e0eb08d0aa622814316f1cb6 | bb1ff1720249700d6f088cda411a6558320fbac9 | |
refs/heads/master | <file_sep># yuekao
April monthly exam
<file_sep>require(['main'],function(){
require(['mui'],function(mui){
click();
function click(){
var button=document.querySelector('button');
mui('section').on('tap','button',function(){
			// Get the form data
var fen=document.querySelector('.fen');
var lis=[...fen.querySelectorAll('input')];
var fi=document.querySelector('.fi');
var pr=document.querySelector('.pr');
//console.log(fi.value.trim(),lis[0].value.trim())
mui.ajax('/api/creat',{
data:{
uname:fi.value.trim(),
price:pr.value.trim(),
pin:lis[0].value.trim(),
fen:lis[1].value.trim(),
fu:lis[2].value.trim(),
add:lis[3].value.trim(),
shu:lis[4].value.trim(),
ke:lis[5].value.trim(),
yan:lis[6].value.trim(),
bei:lis[7].value.trim()
},
				dataType:'json',// the server returns data in JSON format
				type:'post',// HTTP request type
				timeout:10000,// timeout set to 10 seconds
success:function(data){
if(data.code==1){
alert("成功")
location.href='../index.html'
}else{
alert("失败")
}
}
});
})
}
})
})<file_sep>var gulp=require('gulp');
var webserver=require('gulp-webserver');
gulp.task('web',function(){
return gulp.src('./src')
.pipe(webserver({
port:8989,
open:true,
livereload:true,
proxies:[
{source:"/api/xuan",target:"http://localhost:3000/api/xuan"},
{source:"/api/creat",target:"http://localhost:3000/api/creat"},
{source:"/api/found",target:"http://localhost:3000/api/found"}
]
}))
})
<file_sep>[{
"img": "img/i1.png",
"uname": "珠片绣订SX-1126",
"nei": "3D重手工",
"biao": "现货"
}, {
"img": "img/i2.png",
"uname": "珠片绣订SX-1126",
"nei": "3D重手工",
"biao": "供应"
}, {
"img": "img/i3.png",
"uname": "珠片绣订SX-1126",
"nei": "3D重手工",
"biao": "现货"
}, {
"img": "img/i4.png",
"uname": "珠片绣订SX-1126",
"nei": "3D重手工",
"biao": "供应"
}, {
"img": "img/i5.png",
"uname": "珠片绣订SX-1126",
"nei": "3D重手工",
"biao": "供应"
}]
[{
"uname": "1",
"img": "img/i4.png",
"price": 1,
"pin": "1",
"fen": "1",
"fu": "1",
"add": "1",
"shu": "1",
"yan": "1",
"bei": "1",
"ke": "1"
}]<file_sep>require(['main'], function() {
require(['mui'], function(mui) {
var con=0;
var page=1;
var limit=10;
var list = document.querySelector('.list');
mui.init({
pullRefresh: {
			container: refreshContainer, // element to refresh; any selector querySelector can resolve, e.g. an id or .class
			up: {
				contentrefresh: "正在加载...", // optional; caption shown on the pull-up control while loading
				contentnomore: '没有更多数据了', // optional; message shown when there is no more data to load
				callback: pullfresh // required; refresh callback, e.g. fetch new data from the server via ajax
}
}
});
xuan();
push();
search();
function pullfresh(){
setTimeout(function(){
if(con==0){
mui('#refreshContainer').pullRefresh().endPullupToRefresh(true);
}else{
mui('#refreshContainer').pullRefresh().endPullupToRefresh(false);
}
page++;
xuan();
},1000)
}
function xuan() {
mui.ajax('/api/xuan', {
data:{
page:page,
limit:limit
},
				dataType: 'json', // the server returns data in JSON format
				type: 'post', // HTTP request type
				timeout: 10000, // timeout set to 10 seconds
success: function(data) {
con=data.data.length;
console.log(con);
//list.innerHTML.innerHTML='';
list.innerHTML += data.data.map(function(item) {
return `<li>
<img src="${item.img}" alt="">
<h4>${item.uname}</h4>
<p><span>${item.nei}</span><span class="te">${item.biao}</span></p>
</li>`
}).join('')
}
});
}
function push(){
var bu=document.querySelector('.bu');
mui('.tex').on('tap','.bu',function(){
location.href='../push.html'
})
}
function search(){
var ji=document.querySelector('.ji');
ji.oninput=function(){
console.log(this.value);
var val=this.value.trim();
mui.ajax('/api/found',{
data:{
uname:val
},
				dataType:'json',// the server returns data in JSON format
				type:'post',// HTTP request type
				timeout:10000,// timeout set to 10 seconds
success:function(data){
console.log(data);
var cha=document.querySelector('.cha');
console.log(cha);
cha.style.display="block";
cha.innerHTML=data.data.map(function(item){
return `<li>
<p>工艺商品:${item.uname}</p>
<p>价格:${item.price}</p>
<p>${item.pin}</p>
<p>${item.fen}</p>
<p>${item.fu}</p>
<p>${item.add}</p>
<p>${item.shu}</p>
<p>${item.yan}</p>
<p>${item.bei}</p>
</li>
`
}).join('');
}
});
}
}
})
})
| e11a38f1ffc40cac42dffdab87e7286292e364b4 | [
"Markdown",
"JavaScript"
] | 5 | Markdown | wyx2351881773/yuekao | e6e9d494610f3a466b83daa13d5db5a8363eea52 | 007b9000ec32d5933c3fce9900cd9bf8741ff6de | |
refs/heads/master | <repo_name>foonpcf/django-nextpage<file_sep>/README.rst
===============
Django NextPage
===============
django-nextpage is a modified version of django-pagination that only provides previous and next page links.
Installation
============
Add `nextpage` to `INSTALLED_APPS`, like:
INSTALLED_APPS = (
# ...
'nextpage',
)
and `TEMPLATE_CONTEXT_PROCESSORS` should have `django.core.context_processors.request`, like:
("django.core.context_processors.auth",
"django.core.context_processors.debug",
"django.core.context_processors.i18n",
"django.core.context_processors.media",
"django.core.context_processors.request")
Usage
=====
Just like `django-pagination`; in fact, it is designed as a drop-in replacement. Just load the `nextpage` template tags:
{% load nextpage %}
{% autopaginate object_list 20 %}
{% paginate %}
<file_sep>/setup.py
from setuptools import setup, find_packages
version = '0.1'
LONG_DESCRIPTION = """
Usage is just like django-pagination, but only next and previous page links are provided.
"""
setup(
name='django-nextpage',
version=version,
description="django-nextpage",
long_description=LONG_DESCRIPTION,
classifiers=[
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
],
keywords='pagination,django',
author='tzangms',
author_email='<EMAIL>',
url='http://github.com/tzangms/django-nextpage',
license='BSD',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
)
| 47102e79120d2a0d3e5e8824053d572990cb23cf | [
"Python",
"reStructuredText"
] | 2 | reStructuredText | foonpcf/django-nextpage | 62d70ca03d57d7e1b07ae524f82149154ae53fa3 | a1540ef8cca7725e98bd7e3b962a7f63bcd50776 | |
refs/heads/master | <repo_name>chankei613/ajsai_official_blog_theme<file_sep>/page.php
<?php
// Output the header
get_header();
// Output the page content
if(have_posts()): while(have_posts()): the_post();
the_content();
endwhile; endif; wp_reset_postdata();
// Output the footer
get_footer();
?><file_sep>/sidebar.php
<aside class="l-sidebar">
<?php dynamic_sidebar( 'sidebar-1' ); ?>
</aside><file_sep>/template/parts/header-image.php
<div class="m-mainVisual">
<?php the_custom_header_markup();?>
</div><file_sep>/functions.php
<?php
/*
 * Theme setup
*/
function ajsai_setup(){
/*
	 * Enable post thumbnails (featured images)
*/
add_theme_support( 'post-thumbnails' );
/*
	 * Feed links: adds RSS feed links so the feeds can be registered in readers
*/
add_theme_support( 'automatic-feed-links' );
	/* Support post formats
	 * Styling via CSS: post_class() outputs a format-{} class name
	 * Changing the markup in PHP: get_post_format() returns the format for use in conditionals
*
*/
add_theme_support( 'post-formats', array(
'aside',
'image',
'video',
'quote',
'link',
));
/*
	 * Output core-generated HTML in HTML5 markup
*/
add_theme_support( 'html5', array(
'search-form',
'comment-form',
'comment-list',
'gallery',
'caption',
));
/*
	 * Enable the custom header
	 * When flex-height / flex-width are set to true, it can be fetched and output with <img src="<?php header_image(); ?>" height="<?php echo get_custom_header()->height; ?>" width="<?php echo get_custom_header()->width; ?>" alt="" />
*/
$defaults = array(
		'default-image' => get_template_directory_uri() . '/images/header.jpg', // Default image to use when none has been set
'random-default' => false,
		'flex-height' => false, // Set to true when using a flexible header, and configure the height option
'height' => 0,
'flex-width' => false,
'width' => 0,
'default-text-color' => '',
'header-text' => true,
'uploads' => true,
'wp-head-callback' => '',
'admin-head-callback' => '',
'admin-preview-callback' => '',
);
add_theme_support( 'custom-header', $defaults );
/*
	 * Enable the custom logo
	 * The logo image can be set from the admin screen
*/
add_theme_support( 'custom-logo', array(
'height' => 100,
'width' => 300,
'flex-height' => true,
'flex-width' => true,
'header-text' => array( 'site-title', 'site-description' ),
));
/*
	 * Let WordPress generate a sensible title tag in wp_head()
*
*/
add_theme_support( 'title-tag' );
/*
	 * When using the theme customizer, widget changes can be previewed without a reload.
*
*/
add_theme_support( 'customize-selective-refresh-widgets' );
/*
	 * Enable wp_nav_menu()
	 * Adds the "Menus" item to the admin screen
*/
register_nav_menu( 'header-navigation', 'Header Navigation' );
$starter_content = array(
'widgets' => array(
			// Register widgets
'top-right' => array(
'id' => 'sidebar-1',
'before_widget' => '<aside class="widget %2$s">',
'after_widget' => '</aside>',
'before_title' => '<h3 class="widget-title">',
'after_title' => '</h3>',
),
'footer-left' => array(
'id' => 'sidebar-2',
'before_widget' => '<div class="widget %2$s">',
'after_widget' => '</div>',
'before_title' => '<p class="widget-title">',
'after_title' => '</p>',
),
'footer-center' => array(
'id' => 'sidebar-3',
'before_widget' => '<div class="widget %2$s">',
'after_widget' => '</div>',
'before_title' => '<p class="widget-title">',
'after_title' => '</p>',
),
'footer-right' => array(
'id' => 'sidebar-4',
'before_widget' => '<div class="widget %2$s">',
'after_widget' => '</div>',
'before_title' => '<p class="widget-title">',
'after_title' => '</p>',
),
),
		// Define the static pages.
'posts' => array(
'front',
'about',
'contact',
'blog',
'homepage-section',
),
		// Use a static page as the front page.
'options' => array(
'show_on_front' => 'page',
'page_on_front' => '{{front}}',
'page_for_posts' => '{{blog}}',
),
// Set the front page section theme mods to the IDs of the core-registered pages.
'theme_mods' => array(
'panel_1' => '{{homepage-section}}',
'panel_2' => '{{about}}',
'panel_3' => '{{blog}}',
'panel_4' => '{{contact}}',
),
// Set up nav menus for each of the two areas registered in the theme.
'nav_menus' => array(
// Assign a menu to the "top" location.
'top' => array(
'name' => __( 'Top Menu', 'twentyseventeen' ),
'items' => array(
'link_home', // Note that the core "home" page is actually a link in case a static front page is not used.
'page_about',
'page_blog',
'page_contact',
),
),
// Assign a menu to the "social" location.
'social' => array(
'name' => __( 'Social Links Menu', 'twentyseventeen' ),
'items' => array(
'link_yelp',
'link_facebook',
'link_twitter',
'link_instagram',
'link_email',
),
),
),
);
add_theme_support( 'starter-content', $starter_content );
}
add_action( 'after_setup_theme', 'ajsai_setup' );
/**
* Load scripts and style sheets
 * Loads the jQuery bundled with WordPress core
*/
function load_scripts(){
wp_enqueue_script(
		'init', // handle name
		get_template_directory_uri() . '/assets/js/init.min.js', // source URL
		array( 'jquery' ), // scripts that must be loaded first (handle names)
		filemtime( get_template_directory() . '/assets/js/init.min.js' ), // version
		false // load just before </body>?
);
}
add_action( 'wp_enqueue_scripts', 'load_scripts' );
/**
* Register our sidebars and widgetized areas.
 * Adds widget support
*/
function arphabet_widgets_init() {
register_sidebar(
array(
'name' =>'トップページ みぎ',
'id' => 'sidebar-1',
'before_widget' => '<aside class="widget %2$s">',
'after_widget' => '</aside>',
'before_title' => '<h3 class="widget-title">',
'after_title' => '</h3>',
)
);
register_sidebar(
array(
'name' =>'フッター ひだり',
'id' => 'sidebar-2',
'before_widget' => '<div class="widget %2$s">',
'after_widget' => '</div>',
'before_title' => '<p class="widget-title">',
'after_title' => '</p>',
)
);
register_sidebar(
array(
'name' =>'フッター 中央',
'id' => 'sidebar-3',
'before_widget' => '<div class="widget %2$s">',
'after_widget' => '</div>',
'before_title' => '<p class="widget-title">',
'after_title' => '</p>',
)
);
register_sidebar(
array(
'name' => 'フッター みぎ',
'id' => 'sidebar-4',
'before_widget' => '<div class="widget %2$s">',
'after_widget' => '</div>',
'before_title' => '<p class="widget-title">',
'after_title' => '</p>',
)
);
}
add_action( 'widgets_init', 'arphabet_widgets_init' );
/*
 * Limit the number of characters in post titles
*
*/
add_filter('the_title', 'my_the_title', 10, 2);
function my_the_title($title, $id) {
$title = mb_strimwidth($title, 0, 40, "…","UTF-8");
return $title;
}
/*
 * Post excerpt display
 * Usage: the_excerpt();
*/
function new_excerpt_mblength($length) {
	// Character limit for the excerpt (currently 60)
return 60;
	// To vary the length on mobile:
// return ( wp_is_mobile() ) ? 35 : 140;
}
add_filter('excerpt_mblength', 'new_excerpt_mblength');
// String appended to the end of the excerpt
function new_excerpt_more($more) {
return '...';
}
add_filter('excerpt_more', 'new_excerpt_more');
/*
 * Work around the current page number being ignored on static pages
 * Keeps pagination from breaking
*/
add_action( 'parse_query', 'my_parse_query' );
function my_parse_query( $query ) {
if ( ! isset( $query->query_vars['paged'] ) && isset( $query->query_vars['page'] ) )
$query->query_vars['paged'] = $query->query_vars['page'];
}
/*
* pre_get_posts
 * Adjust the main query before the loop runs
 * get_queried_object() is handy for automatically fetching the current category etc.
*/
add_action( 'pre_get_posts', 'my_pre_get_posts' );
function my_pre_get_posts( $query ) {
if ( is_admin() || ! $query -> is_main_query() ) return;
	// Use is_* conditional tags to configure each page type
// if ( $query -> is_home() ) {
// $paged = get_query_var('paged');
// $query -> set( 'posts_per_page', '' );
// $query -> set( 'paged', $paged );
// }
}
<file_sep>/header.php
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<link rel="stylesheet" href="<?php echo get_template_directory_uri(); ?>/assets/css/style.min.css">
<?php wp_head(); ?>
</head>
<body <?php body_class(); ?>>
<header class="l-header">
<div class="l-header__main">
<?php
$site_head = get_custom_logo();
if( empty( $site_head ) ){
$site_head = get_bloginfo( 'name' );
}
echo "<h1 class=\"siteName\">".$site_head."</h1>";
$description = get_bloginfo( 'description', 'display' );
if ( $description || is_customize_preview() ){
echo "<p class=\"siteDesc\">".$description."</p>";
}
?>
</div>
<?php
if( is_front_page() ){
require(dirname(__FILE__).'/template/parts/header-image.php');
}?>
<?php
$setting = array(
			'menu_class' => 'main-menu', // class added to the menu
			'container' => 'nav',
			'container_class' => 'l-nav', // class added to the container
			'fallback_cb' => 'wp_page_menu', // fallback_cb runs when the menu set for theme_location is not found
			'echo' => true,
			'depth' => 0, // menu depth; 0 outputs every level
'items_wrap' => '<ul class="%2$s">%3$s</ul>',
);
wp_nav_menu( $setting );
?>
</header>
<file_sep>/README.md
# ajsai_official_blog_theme
WordPress official theme for blogger
<file_sep>/index.php
<?php get_header();?>
<div class="l-main -column2">
<main class="l-mainContent">
<?php if ( have_posts() ) :?>
<div class="l-card">
<?php while ( have_posts() ) :the_post();?>
<article class="l-card__content"><a href="<?php the_permalink();?>">
<div class="l-card__thumbnail"><?php the_post_thumbnail();?></div>
<div class="l-card__main">
<time class="l-card__date" datetime="<?php the_time('Y-m-d');?>"><?php the_time('Y/m/d');?></time>
<h2 class="l-card__title"><?php the_title();?></h2>
<div class="l-card__briefing">
<?php the_excerpt();?>
</div>
</div>
</a></article>
<?php endwhile;?>
</div>
<?php endif;?>
</main>
<?php get_sidebar();?>
</div>
<?php get_footer();?><file_sep>/footer.php
<footer class="l-footer">
<div class="l-footer__column3">
<?php
if ( is_active_sidebar( 'sidebar-2' ) ){
dynamic_sidebar( 'sidebar-2' );
}
if ( is_active_sidebar( 'sidebar-3' ) ){
dynamic_sidebar( 'sidebar-3' );
}
if ( is_active_sidebar( 'sidebar-4' ) ){
dynamic_sidebar( 'sidebar-4' );
}
?>
</div>
<small>© Hydrangea</small>
</footer>
<?php wp_footer(); ?>
</body>
</html><file_sep>/single.php
<?php get_header();?>
<div class="l-main -column2">
<main class="l-mainContent">
<article class="l-postContent">
<?php if ( have_posts() ) : while ( have_posts() ) :the_post();
$post_cats = get_the_category();
$post_tags = get_the_tags();
?>
<header class="l-postContent__head">
<h1><?php the_title();?></h1>
<div class="l-postContent__add">
<?php
if( !empty($post_cats) ){
foreach( $post_cats as $cats ){
echo '<a href="'. get_category_link($cats->term_id) .'" class="l-postContent__category">'.$cats->name.'</a>';
}
}?>
<time datetime="<?php the_time('Y-m-d');?>"><?php the_time('Y/m/d');?></time>
</div>
<?php
if( !empty($post_tags) ){
echo '<ul class="l-postContent__tag">';
foreach( $post_tags as $tags ){
echo '<li><a href="'. get_tag_link($tags->term_id) .'">' .$tags->name. '</a></li>';
}
echo '</ul>';
}?>
</header>
<div class="l-postContent__thumbnail"><?php the_post_thumbnail();?></div>
<div class="l-postContent__main">
<?php the_content();?>
</div>
<?php endwhile; endif;?>
<footer class="l-postContent__footer">
</footer>
</article>
</main>
<?php get_sidebar();?>
</div>
<?php get_footer();?> | de9729808ee38b11fdc8e0dfe2674f6a569fd04c | [
"Markdown",
"PHP"
] | 9 | PHP | chankei613/ajsai_official_blog_theme | cd117140189f2cbbf8ec1387919a3f797859a0e7 | af8e945e0794a634a75b54f1c3cac28bcc37e99b | |
refs/heads/master | <repo_name>TomasHansson/MediaIntegrator<file_sep>/Laboration4/Form1.cs
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Windows.Forms;
using System.Xml;
using System.Xml.Linq;
namespace Laboration4
{
public partial class MainForm : Form
{
public MainForm()
{
InitializeComponent();
}
private void StartIntegrationButton_Click(object sender, EventArgs e)
{
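            // Watch the MediaShop export folder (frMediaShop) for CSV changes and the SimpleMedia
            // export folder (frSimpleMedia) for XML changes; each change triggers a conversion below.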
integrationInProgressLabel.Visible = true;
string directory = Directory.GetCurrentDirectory() + @"\frMediaShop";
FileSystemWatcher fileSystemWatcher = new FileSystemWatcher()
{
Path = directory,
Filter = "*.csv",
NotifyFilter = NotifyFilters.CreationTime | NotifyFilters.LastWrite
};
fileSystemWatcher.Created += FileCreatedOrChanged;
fileSystemWatcher.Changed += FileCreatedOrChanged;
fileSystemWatcher.EnableRaisingEvents = true;
string reverseDirectory = Directory.GetCurrentDirectory() + @"\frSimpleMedia";
FileSystemWatcher reverseFileSystemWatcher = new FileSystemWatcher()
{
Path = reverseDirectory,
Filter = "*.xml",
NotifyFilter = NotifyFilters.CreationTime | NotifyFilters.LastWrite
};
reverseFileSystemWatcher.Created += ReverseFileCreatedOrChanged;
reverseFileSystemWatcher.Changed += ReverseFileCreatedOrChanged;
reverseFileSystemWatcher.EnableRaisingEvents = true;
}
private void FileCreatedOrChanged(object sender, FileSystemEventArgs e)
{
CreateXMLFromCsv();
}
private void ReverseFileCreatedOrChanged(object sender, FileSystemEventArgs e)
{
CreateCsvFromXML();
}
private void CreateXMLFromCsv()
{
string CSVFilePath = Directory.GetCurrentDirectory() + @"\frMediaShop\Products.csv";
string XMLFilePath = Directory.GetCurrentDirectory() + @"\tillSimpleMedia\Products.xml";
XmlWriterSettings xmlWriterSettings = new XmlWriterSettings()
{
Indent = true
};
XmlWriter xmlWriter = XmlWriter.Create(XMLFilePath, xmlWriterSettings);
xmlWriter.WriteStartDocument();
xmlWriter.WriteStartElement("Inventory");
using (StreamReader streamReader = new StreamReader(CSVFilePath))
{
string line;
string[] lineData;
while ((line = streamReader.ReadLine()) != null)
{
lineData = line.Split(';');
xmlWriter.WriteStartElement("Item");
xmlWriter.WriteStartElement("Name");
xmlWriter.WriteString(lineData[1]);
xmlWriter.WriteEndElement();
xmlWriter.WriteStartElement("Count");
xmlWriter.WriteString(lineData[6]);
xmlWriter.WriteEndElement();
xmlWriter.WriteStartElement("Price");
xmlWriter.WriteString(lineData[2]);
xmlWriter.WriteEndElement();
xmlWriter.WriteStartElement("Comment");
xmlWriter.WriteString("");
xmlWriter.WriteEndElement();
xmlWriter.WriteStartElement("Artist");
xmlWriter.WriteString(lineData[4]);
xmlWriter.WriteEndElement();
xmlWriter.WriteStartElement("Publisher");
xmlWriter.WriteString(lineData[5]);
xmlWriter.WriteEndElement();
xmlWriter.WriteStartElement("Genre");
xmlWriter.WriteString("");
xmlWriter.WriteEndElement();
xmlWriter.WriteStartElement("Year");
xmlWriter.WriteString("0");
xmlWriter.WriteEndElement();
xmlWriter.WriteStartElement("ProductID");
xmlWriter.WriteString(lineData[0]);
xmlWriter.WriteEndElement();
xmlWriter.WriteEndElement(); // </Item>
}
}
xmlWriter.WriteEndElement(); // </Inventory>
xmlWriter.Close();
}
private void CreateCsvFromXML()
{
string XMLFilePath = Directory.GetCurrentDirectory() + @"\frSimpleMedia\Products.xml";
string CSVFilePath = Directory.GetCurrentDirectory() + @"\tillMediaShop\Products.csv";
XDocument xDocument = XDocument.Load(XMLFilePath);
using (StreamWriter streamWriter = new StreamWriter(CSVFilePath))
{
foreach (XElement item in xDocument.Descendants("Item"))
{
List<XElement> elements = item.Descendants().ToList();
string product = elements.First(x => x.Name == "ProductID").Value + ";";
product += elements.First(x => x.Name == "Name").Value + ";";
product += elements.First(x => x.Name == "Price").Value + ";";
                        product += "Ospecifierad;"; // There is no equivalent of the product type in SimpleMedia.
product += elements.First(x => x.Name == "Artist").Value + ";";
product += elements.First(x => x.Name == "Publisher").Value + ";";
product += elements.First(x => x.Name == "Count").Value + ";";
                        product += "0"; // There is no equivalent of the number sold in SimpleMedia.
streamWriter.WriteLine(product);
}
}
}
}
}
<file_sep>/README.md
# MediaIntegrator
A small project done during a C#/.NET course at Karlstads university.
A simple program meant to emulate a piece of software used as an integration tool between two
different media shops that use different types of files to store their products.
What I Learned:
- Using FileSystemWatcher to call functions when files are created or updated.
- Using XmlWriter to write XML-files.
- Using XDocument and XElement to read XML-files.
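
A minimal, self-contained sketch of the FileSystemWatcher wiring described above (the folder path, class name and handlers are illustrative; the real wiring lives in `Laboration4/Form1.cs`):

```csharp
using System.IO;

class WatcherExample
{
    static void Main()
    {
        // Watch a folder for new or changed CSV files and react to each event.
        var watcher = new FileSystemWatcher(@"C:\watched-folder")
        {
            Filter = "*.csv",
            NotifyFilter = NotifyFilters.CreationTime | NotifyFilters.LastWrite
        };
        watcher.Created += (sender, e) => System.Console.WriteLine($"Created: {e.FullPath}");
        watcher.Changed += (sender, e) => System.Console.WriteLine($"Changed: {e.FullPath}");
        watcher.EnableRaisingEvents = true;

        System.Console.ReadLine(); // keep the process alive while watching
    }
}
```

The project uses this pattern twice, once per watched folder, with the event handlers calling the CSV-to-XML and XML-to-CSV conversion methods.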
| 6037620ab9a04f24c65a3016cbc9d1bef90c7988 | [
"Markdown",
"C#"
] | 2 | C# | TomasHansson/MediaIntegrator | cfa963cd2656b1d586afc60e0359307f3eead2f2 | abdc6d7e538245a8fb8210bd65b4e453af6e07f2 | |
refs/heads/master | <repo_name>TomKauffeld/NES-Emulator<file_sep>/Emulator.core/src/emulator/car/core/Cartridge.h
#ifndef __EMULATOR_CAR_CORE_CARTRIDGE_H__
#define __EMULATOR_CAR_CORE_CARTRIDGE_H__
#include "CartridgeHeader.h"
#include <emulator/car/core/cartridge.h>
#include <emulator/car/mappers/mapperProvider.h>
#include <emulator/car/mappers/mapper.h>
#include <inttypes.h>
#define CARTRIDGE_MIRROR_HORIZONTAL 0x00
#define CARTRIDGE_MIRROR_VERTICAL 0x01
#define CARTRIDGE_MIRROR_ONESCREEN_LO 0x02
#define CARTRIDGE_MIRROR_ONESCREEN_HI 0x03
typedef struct cartridge {
CartridgeHeader header;
uint8_t trainer[512];
uint16_t mapper_id;
uint8_t* prg;
uint8_t* chr;
Mapper* mapper;
uint8_t mirror;
} Cartridge;
uint8_t cartridge_read_prg(Cartridge* cartridge, uint16_t addr);
void cartridge_write_prg(Cartridge* cartridge, uint16_t addr, uint8_t value);
uint8_t cartridge_read_chr(Cartridge* cartridge, uint16_t addr);
void cartridge_write_chr(Cartridge* cartridge, uint16_t addr, uint8_t value);
#endif<file_sep>/Emulator.core/src/emulator/ppu/core/palScreen.h
#ifndef __EMULATOR_PPU_CORE_PAL_SCREEN_H__
#define __EMULATOR_PPU_CORE_PAL_SCREEN_H__
#include <inttypes.h>
//#define to_color(r, g, b) ((r << 24) + (g << 16) + (b << 8) + 255)
#define to_color(r, g, b) ((255 << 24) + (b << 16) + (g << 8) + r)
uint32_t palScreen[0x40] = {
to_color(84, 84, 84),
to_color(0, 30, 116),
to_color(8, 16, 144),
to_color(48, 0, 136),
to_color(68, 0, 100),
to_color(92, 0, 48),
to_color(84, 4, 0),
to_color(60, 24, 0),
to_color(32, 42, 0),
to_color(8, 58, 0),
to_color(0, 64, 0),
to_color(0, 60, 0),
to_color(0, 50, 60),
to_color(0, 0, 0),
to_color(0, 0, 0),
to_color(0, 0, 0),
to_color(152, 150, 152),
to_color(8, 76, 196),
to_color(48, 50, 236),
to_color(92, 30, 228),
to_color(136, 20, 176),
to_color(160, 20, 100),
to_color(152, 34, 32),
to_color(120, 60, 0),
to_color(84, 90, 0),
to_color(40, 114, 0),
to_color(8, 124, 0),
to_color(0, 118, 40),
to_color(0, 102, 120),
to_color(0, 0, 0),
to_color(0, 0, 0),
to_color(0, 0, 0),
to_color(236, 238, 236),
to_color(76, 154, 236),
to_color(120, 124, 236),
to_color(176, 98, 236),
to_color(228, 84, 236),
to_color(236, 88, 180),
to_color(236, 106, 100),
to_color(212, 136, 32),
to_color(160, 170, 0),
to_color(116, 196, 0),
to_color(76, 208, 32),
to_color(56, 204, 108),
to_color(56, 180, 204),
to_color(60, 60, 60),
to_color(0, 0, 0),
to_color(0, 0, 0),
to_color(236, 238, 236),
to_color(168, 204, 236),
to_color(188, 188, 236),
to_color(212, 178, 236),
to_color(236, 174, 236),
to_color(236, 174, 212),
to_color(236, 180, 176),
to_color(228, 196, 144),
to_color(204, 210, 120),
to_color(180, 222, 120),
to_color(168, 226, 144),
to_color(152, 226, 180),
to_color(160, 214, 228),
to_color(160, 162, 160),
to_color(0, 0, 0),
to_color(0, 0, 0)
};
#endif<file_sep>/Emulator.core/src/emulator/Emulator.c
#include "Emulator.h"
#include <stdlib.h>
#include <string.h>
Emulator* emulator_init()
{
Emulator* emulator = (Emulator*)malloc(sizeof(Emulator));
if (emulator == NULL)
return NULL;
emulator->cpu_timer = 0;
emulator->cpu = cpu_init();
if (emulator->cpu == NULL)
{
free(emulator);
return NULL;
}
return emulator;
}
void emulator_destroy(Emulator* emulator)
{
if (emulator == NULL)
return;
cpu_destroy(emulator->cpu);
free(emulator);
}
Cartridge* emulator_insert_cartridge(Emulator* emulator, Cartridge* car)
{
return bus_insert_cartridge(emulator->cpu->bus, car);
}
Cartridge* emulator_remove_cartridge(Emulator* emulator)
{
return bus_remove_cartridge(emulator->cpu->bus);
}
void emulator_signal_reset(Emulator* emulator)
{
cpu_signal_reset(emulator->cpu);
}
void emulator_tick(Emulator* emulator)
{
emulator->cpu_timer++;
bus_tick(emulator->cpu->bus);
	if (emulator->cpu_timer >= 3) // the CPU runs at one third of the PPU clock, so step it every 3 ticks
{
cpu_clock(emulator->cpu);
emulator->cpu_timer = 0;
}
}
uint8_t emulator_get_cpu_bus(Emulator* emulator, uint16_t addr)
{
return cpu_bus_read(emulator->cpu, addr);
}
uint8_t emulator_get_ppu_bus(Emulator* emulator, uint16_t addr)
{
return ppu_bus_read(emulator->cpu->bus->ppu, addr);
}
uint16_t emulator_get_pc(Emulator* emulator)
{
return emulator->cpu->registery->pc;
}
uint8_t emulator_get_cpu_reg_a(Emulator* emulator)
{
return emulator->cpu->registery->a;
}
uint8_t emulator_get_cpu_reg_x(Emulator* emulator)
{
return emulator->cpu->registery->x;
}
uint8_t emulator_get_cpu_reg_y(Emulator* emulator)
{
return emulator->cpu->registery->y;
}
uint8_t emulator_get_cpu_reg_sp(Emulator* emulator)
{
return emulator->cpu->registery->sp;
}
void emulator_set_pc(Emulator* emulator, uint16_t pc)
{
emulator->cpu->registery->pc = pc;
}
void emulator_get_screen(Emulator* emulator, uint32_t* screen, size_t size)
{
memcpy_s(screen, size * sizeof(uint32_t), emulator->cpu->bus->ppu->screen, sizeof(uint32_t) * 256 * 240);
}
uint8_t emulator_get_screen_pixel(Emulator* emulator, int x, int y)
{
	if (x < 0 || y < 0 || x >= 256 || y >= 240)
		return 0;
uint32_t index = y * 256 + x;
return emulator->cpu->bus->ppu->screen[index];
}
bool emulator_is_screen_ready(Emulator* emulator)
{
return emulator->cpu->bus->ppu->frame;
}
void emulator_get_pattern_table(Emulator* emulator, uint8_t i, uint8_t palette, uint32_t* data)
{
ppu_get_pattern_table(emulator->cpu->bus->ppu, i, palette, data);
}
void emulator_get_palette_table(Emulator* emulator, uint8_t palette, uint32_t* data)
{
ppu_get_palette_table(emulator->cpu->bus->ppu, palette, data);
}
<file_sep>/Emulator/src/main.c
#include <emulator/emulator.h>
#include <emulator/car/core/cartridge.h>
#include <emulator/car/mappers/mapperDllProvider.h>
#include <utils/boolean.h>
#include <stdlib.h>
#include <stdio.h>
#include <SDL.h>
#define WIDTH 256
#define HEIGHT 240
int main(int argc, char ** argv)
{
if (SDL_Init(SDL_INIT_EVERYTHING) != 0)
return -4;
SDL_Window* window;
SDL_Renderer* renderer;
if (SDL_CreateWindowAndRenderer(WIDTH, HEIGHT, SDL_WINDOW_SHOWN | SDL_WINDOW_RESIZABLE, &window, &renderer) != 0)
{
SDL_LogError(SDL_LOG_CATEGORY_VIDEO, "Couldn't init screen/renderer");
SDL_LogError(SDL_LOG_CATEGORY_VIDEO, SDL_GetError());
SDL_Quit();
return -5;
}
Emulator* emulator = emulator_init();
if (emulator == NULL)
{
SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "Couldn't init the emulator");
SDL_DestroyRenderer(renderer);
SDL_DestroyWindow(window);
SDL_Quit();
return -1;
}
	Cartridge* cartridge = cartridge_load_from_file("nestest.nes", &mapper_dll_provider);
if (cartridge == NULL)
{
SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "Couldn't load the cartridge");
emulator_destroy(emulator);
SDL_DestroyRenderer(renderer);
SDL_DestroyWindow(window);
SDL_Quit();
return -2;
}
emulator_insert_cartridge(emulator, cartridge);
emulator_signal_reset(emulator);
bool running = TRUE;
SDL_Texture* buffer = SDL_CreateTexture(renderer,
SDL_PIXELFORMAT_RGBA32,
SDL_TEXTUREACCESS_STREAMING,
WIDTH,
HEIGHT);
if (buffer == NULL)
{
SDL_LogError(SDL_LOG_CATEGORY_VIDEO, "Couldn't create the buffer");
SDL_LogError(SDL_LOG_CATEGORY_VIDEO, SDL_GetError());
cartridge_destroy(emulator_remove_cartridge(emulator));
emulator_destroy(emulator);
SDL_DestroyRenderer(renderer);
SDL_DestroyWindow(window);
		SDL_Quit();
		return -6;
	}
uint32_t* pixels = (uint32_t*)malloc(sizeof(uint32_t) * WIDTH * HEIGHT);
if (pixels == NULL)
{
cartridge_destroy(emulator_remove_cartridge(emulator));
emulator_destroy(emulator);
SDL_DestroyTexture(buffer);
SDL_DestroyRenderer(renderer);
SDL_DestroyWindow(window);
		SDL_Quit();
		return -3;
	}
int pitch = WIDTH * sizeof(uint32_t);
while (is_true(running))
{
SDL_Event event;
while (SDL_PollEvent(&event) != 0)
{
switch (event.type)
{
case SDL_QUIT:
running = FALSE;
break;
default:
break;
}
}
emulator_tick(emulator);
if (emulator_is_screen_ready(emulator))
{
SDL_SetRenderDrawColor(renderer, 0, 0, 0, 255);
SDL_RenderClear(renderer);
emulator_get_screen(emulator, pixels, WIDTH * HEIGHT);
if (SDL_UpdateTexture(buffer, NULL, pixels, pitch) == 0)
{
if (SDL_RenderCopy(renderer, buffer, NULL, NULL) != 0)
{
SDL_LogError(SDL_LOG_CATEGORY_RENDER, "Couldn't render the buffer");
SDL_LogError(SDL_LOG_CATEGORY_RENDER, SDL_GetError());
}
}
else
{
SDL_LogError(SDL_LOG_CATEGORY_RENDER, "Couldn't update the buffer");
SDL_LogError(SDL_LOG_CATEGORY_RENDER, SDL_GetError());
}
SDL_RenderPresent(renderer);
}
}
free(pixels);
cartridge_destroy(emulator_remove_cartridge(emulator));
emulator_destroy(emulator);
SDL_DestroyTexture(buffer);
SDL_DestroyRenderer(renderer);
SDL_DestroyWindow(window);
SDL_Quit();
return 0;
}<file_sep>/Emulator.core/src/emulator/ppu/core/PPU.c
#include "PPU.h"
#include <stdlib.h>
#include <string.h>
#include <stdio.h>
#include "palScreen.h"
#include "../../car/core/Cartridge.h"
PPU* ppu_init(Cartridge** car)
{
PPU* ppu = (PPU*)malloc(sizeof(PPU));
if (ppu == NULL)
return NULL;
memset(ppu, 0x00, sizeof(PPU));
ppu->screen = (uint32_t*)malloc(sizeof(uint32_t) * 256 * 240);
if (ppu->screen == NULL)
{
free(ppu);
return NULL;
}
memset(ppu->screen, 0x00, sizeof(uint32_t) * 256 * 240);
ppu->bus = ppu_bus_init(car);
if (ppu->bus == NULL)
{
free(ppu->screen);
free(ppu);
return NULL;
}
ppu->car = car;
return ppu;
}
void ppu_destroy(PPU* ppu)
{
if (ppu == NULL)
return;
ppu_bus_destroy(ppu->bus);
free(ppu->screen);
free(ppu);
}
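// CPU-facing register read; addr is the register offset from $2000 (0-7).
// Reading the status register clears the vertical blank flag and the address
// latch, and PPUDATA reads are buffered except for palette addresses.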
uint8_t ppu_read(PPU* ppu, uint16_t addr)
{
uint8_t data = 0x00;
switch (addr)
{
case 0x0000: // Control
case 0x0001: // Mask
break;
case 0x0002: // Status
data = (ppu->status.reg & 0xE0) | (ppu->ppu_data_buffer & 0x1F);
ppu->status.vertical_blank = 0;
ppu->address_latch = 0;
break;
case 0x0003: // OAM Address
case 0x0004: // OAM Data
case 0x0005: // Scroll
case 0x0006: // PPU Address
break;
case 0x0007: // PPU Data
data = ppu->ppu_data_buffer;
ppu->ppu_data_buffer = ppu_bus_read(ppu->bus, ppu->vram_addr.reg);
if (ppu->vram_addr.reg >= 0x3F00)
data = ppu->ppu_data_buffer;
ppu->vram_addr.reg += (ppu->control.increment_mode ? 32 : 1);
break;
default:
break;
}
return data;
}
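// CPU-facing register write; addr is the register offset from $2000 (0-7).
// Scroll and PPU address writes share the address latch and take two writes,
// while PPUDATA writes go to the PPU bus and auto-increment the VRAM address.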
void ppu_write(PPU* ppu, uint16_t addr, uint8_t value)
{
switch (addr)
{
case 0x0000: // Control
ppu->control.reg = value;
ppu->tram_addr.nametable_x = ppu->control.nametable_x;
ppu->tram_addr.nametable_y = ppu->control.nametable_y;
break;
case 0x0001: // Mask
ppu->bus->mask.reg = value;
break;
case 0x0002: // Status
case 0x0003: // OAM Address
case 0x0004: // OAM Data
break;
case 0x0005: // Scroll
if (ppu->address_latch == 0)
{
ppu->fine_x = value & 0x07;
ppu->tram_addr.coarse_x = value >> 3;
ppu->address_latch = 1;
}
else
{
ppu->tram_addr.fine_y = value & 0x07;
ppu->tram_addr.coarse_y = value >> 3;
ppu->address_latch = 0;
}
break;
case 0x0006: // PPU Address
if (ppu->address_latch == 0)
{
ppu->tram_addr.reg = (uint16_t)((value & 0x3F) << 8) | (ppu->tram_addr.reg & 0x00FF);
ppu->address_latch = 1;
}
else
{
ppu->tram_addr.reg = (ppu->tram_addr.reg & 0xFF00) | value;
ppu->vram_addr = ppu->tram_addr;
ppu->address_latch = 0;
}
break;
case 0x0007: // PPU Data
ppu_bus_write(ppu->bus, ppu->vram_addr.reg, value);
ppu->vram_addr.reg += (ppu->control.increment_mode ? 32 : 1);
break;
default:
return;
}
}
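// Advances the PPU by one dot. A scanline is 341 cycles and a frame spans
// scanlines -1 to 260; the frame flag is raised for a single tick when the
// frame wraps around, and an NMI is requested at the start of vertical blank
// when enabled.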
void ppu_tick(PPU * ppu)
{
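	// NOTE: background rendering is forced on here every tick, overriding whatever
	// the game wrote to PPUMASK; this looks like a temporary development shortcut.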
ppu->bus->mask.render_background = 1;
if (ppu->scanline >= -1 && ppu->scanline < 240)
{
if (ppu->scanline == 0 && ppu->cycle == 0)
ppu->cycle = 1;
if (ppu->scanline == -1 && ppu->cycle == 1)
ppu->status.vertical_blank = 0;
if ((ppu->cycle >= 2 && ppu->cycle < 258) || (ppu->cycle >= 321 && ppu->cycle < 338))
{
ppu_update_shifters(ppu);
switch ((ppu->cycle - 1) % 8)
{
case 0:
ppu_load_background_shifters(ppu);
ppu->bg_next_tile_id = ppu_bus_read(ppu->bus, 0x2000 | (ppu->vram_addr.reg & 0x0FFF));
break;
case 2:
ppu->bg_next_tile_attrib = ppu_bus_read(ppu->bus,
0x23C0
| (ppu->vram_addr.nametable_y << 11)
| (ppu->vram_addr.nametable_x << 10)
| ((ppu->vram_addr.coarse_y >> 2) << 3)
| (ppu->vram_addr.coarse_x >> 2));
if (ppu->vram_addr.coarse_y & 0x02)
ppu->bg_next_tile_attrib >>= 4;
if (ppu->vram_addr.coarse_x & 0x02)
ppu->bg_next_tile_attrib >>= 2;
ppu->bg_next_tile_attrib &= 0x03;
break;
case 4:
ppu->bg_next_tile_lsb = ppu_bus_read(ppu->bus, (ppu->control.pattern_background << 12) + ((uint16_t)ppu->bg_next_tile_id << 4) + (ppu->vram_addr.fine_y) + 0);
break;
case 6:
ppu->bg_next_tile_msb = ppu_bus_read(ppu->bus, (ppu->control.pattern_background << 12) + ((uint16_t)ppu->bg_next_tile_id << 4) + (ppu->vram_addr.fine_y) + 8);
break;
case 7:
ppu_increment_scroll_x(ppu);
break;
}
}
if (ppu->cycle == 256)
ppu_increment_scroll_y(ppu);
if (ppu->cycle == 257)
{
ppu_load_background_shifters(ppu);
ppu_transfer_address_x(ppu);
}
if (ppu->cycle == 338 || ppu->cycle == 340)
ppu->bg_next_tile_id = ppu_bus_read(ppu->bus, 0x2000 | (ppu->vram_addr.reg & 0x0FFF));
if (ppu->scanline == -1 && ppu->cycle >= 280 && ppu->cycle < 305)
ppu_transfer_address_y(ppu);
}
if (ppu->scanline >= 241 && ppu->scanline < 261)
{
if (ppu->scanline == 241 && ppu->cycle == 1)
{
ppu->status.vertical_blank = 1;
if (ppu->control.enable_nmi)
ppu->nmi = TRUE;
}
}
uint8_t bg_pixel = 0x00;
uint8_t bg_palette = 0x00;
if (ppu->bus->mask.render_background)
{
uint16_t bit_mux = 0x8000 >> ppu->fine_x;
uint8_t p0_pixel = (ppu->bg_shifter_pattern_lo & bit_mux) > 0;
uint8_t p1_pixel = (ppu->bg_shifter_pattern_hi & bit_mux) > 0;
bg_pixel = (p1_pixel << 1) | p0_pixel;
uint8_t bg_pal0 = (ppu->bg_shifter_attrib_lo & bit_mux) > 0;
uint8_t bg_pal1 = (ppu->bg_shifter_attrib_hi & bit_mux) > 0;
bg_palette = (bg_pal1 << 1) | bg_pal0;
}
uint32_t color = ppu_get_color_from_palette_ram(ppu, bg_palette, bg_pixel);
ppu_set_pixel(ppu, ppu->cycle - 1, ppu->scanline, color);
ppu->frame = FALSE;
ppu->cycle++;
if (ppu->cycle >= 341)
{
ppu->cycle = 0;
ppu->scanline++;
if (ppu->scanline >= 261)
{
ppu->scanline = -1;
ppu->frame = TRUE;
}
}
}
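// Shifts the background shifter registers one bit to the left each dot while
// background rendering is enabled, so the bit selected by fine_x stays current.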
void ppu_update_shifters(PPU* ppu)
{
if (ppu->bus->mask.render_background)
{
		ppu->bg_shifter_pattern_lo <<= 1;
		ppu->bg_shifter_pattern_hi <<= 1;
		ppu->bg_shifter_attrib_lo <<= 1;
		ppu->bg_shifter_attrib_hi <<= 1;
}
}
void ppu_load_background_shifters(PPU* ppu)
{
ppu->bg_shifter_pattern_lo = (ppu->bg_shifter_pattern_lo & 0xFF00) | ppu->bg_next_tile_lsb;
ppu->bg_shifter_pattern_hi = (ppu->bg_shifter_pattern_hi & 0xFF00) | ppu->bg_next_tile_msb;
ppu->bg_shifter_attrib_lo = (ppu->bg_shifter_attrib_lo & 0xFF00) | ((ppu->bg_next_tile_attrib & 0b01) ? 0xFF : 0x00);
ppu->bg_shifter_attrib_hi = (ppu->bg_shifter_attrib_hi & 0xFF00) | ((ppu->bg_next_tile_attrib & 0b10) ? 0xFF : 0x00);
}
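// Coarse X increment: wraps from tile 31 back to 0 and flips the horizontal
// nametable bit, matching the hardware scrolling behaviour.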
void ppu_increment_scroll_x(PPU* ppu)
{
if (ppu->bus->mask.render_background || ppu->bus->mask.render_sprites)
{
if (ppu->vram_addr.coarse_x == 31)
{
ppu->vram_addr.coarse_x = 0;
ppu->vram_addr.nametable_x = ~(ppu->vram_addr.nametable_x);
}
else
ppu->vram_addr.coarse_x++;
}
}
void ppu_increment_scroll_y(PPU* ppu)
{
if (ppu->bus->mask.render_background || ppu->bus->mask.render_sprites)
{
if (ppu->vram_addr.fine_y < 7)
ppu->vram_addr.fine_y++;
else
{
ppu->vram_addr.fine_y = 0;
if (ppu->vram_addr.coarse_y == 29)
{
ppu->vram_addr.coarse_y = 0;
ppu->vram_addr.nametable_y = ~ppu->vram_addr.nametable_y;
}
else if (ppu->vram_addr.coarse_y == 31)
ppu->vram_addr.coarse_y = 0;
else
ppu->vram_addr.coarse_y++;
}
}
}
void ppu_transfer_address_x(PPU* ppu)
{
if (ppu->bus->mask.render_background || ppu->bus->mask.render_sprites)
{
ppu->vram_addr.nametable_x = ppu->tram_addr.nametable_x;
ppu->vram_addr.coarse_x = ppu->tram_addr.coarse_x;
}
}
void ppu_transfer_address_y(PPU* ppu)
{
if (ppu->bus->mask.render_background || ppu->bus->mask.render_sprites)
{
ppu->vram_addr.fine_y = ppu->tram_addr.fine_y;
ppu->vram_addr.nametable_y = ppu->tram_addr.nametable_y;
ppu->vram_addr.coarse_y = ppu->tram_addr.coarse_y;
}
}
void ppu_set_pixel(PPU* ppu, uint16_t x, uint16_t y, uint32_t color)
{
if (x >= 256 || y >= 240)
return;
if (color == 0xFF000000)
return;
uint32_t index = y * 256 + x;
ppu->screen[index] = color;
}
uint32_t ppu_get_color_from_palette_ram(PPU* ppu, uint8_t palette, uint8_t pixel)
{
uint16_t addr = 0x3F00 + ((uint16_t)palette << 2) + pixel;
uint16_t val = ppu_bus_read(ppu->bus, addr);
return palScreen[val & 0x3F];
}
void ppu_get_palette_table(PPU* ppu, uint8_t palette, uint32_t* data)
{
if (data == NULL)
{
printf("%08u %08u %08u %08u\n",
ppu_get_color_from_palette_ram(ppu, palette, 0),
ppu_get_color_from_palette_ram(ppu, palette, 1),
ppu_get_color_from_palette_ram(ppu, palette, 2),
ppu_get_color_from_palette_ram(ppu, palette, 3)
);
}
else
{
data[0] = ppu_get_color_from_palette_ram(ppu, palette, 0);
data[1] = ppu_get_color_from_palette_ram(ppu, palette, 1);
data[2] = ppu_get_color_from_palette_ram(ppu, palette, 2);
data[3] = ppu_get_color_from_palette_ram(ppu, palette, 3);
}
}
void ppu_get_pattern_table(PPU* ppu, uint8_t i, uint8_t palette, uint32_t* data)
{
for (uint16_t nTileY = 0; nTileY < 16; nTileY++)
{
for (uint16_t nTileX = 0; nTileX < 16; nTileX++)
{
// Convert the 2D tile coordinate into a 1D offset into the pattern
// table memory.
uint16_t nOffset = nTileY * 256 + nTileX * 16;
// Now loop through 8 rows of 8 pixels
for (uint16_t row = 0; row < 8; row++)
{
// For each row, we need to read both bit planes of the character
// in order to extract the least significant and most significant
// bits of the 2 bit pixel value. in the CHR ROM, each character
// is stored as 64 bits of lsb, followed by 64 bits of msb. This
// conveniently means that two corresponding rows are always 8
// bytes apart in memory.
uint8_t tile_lsb = ppu_bus_read(ppu->bus, i * 0x1000 + nOffset + row + 0x0000);
uint8_t tile_msb = ppu_bus_read(ppu->bus, i * 0x1000 + nOffset + row + 0x0008);
// Now we have a single row of the two bit planes for the character
// we need to iterate through the 8-bit words, combining them to give
// us the final pixel index
for (uint16_t col = 0; col < 8; col++)
{
// We can get the index value by simply adding the bits together
// but we're only interested in the lsb of the row words because...
uint8_t pixel = (tile_lsb & 0x01) + (tile_msb & 0x01);
// ...we will shift the row words 1 bit right for each column of
// the character.
tile_lsb >>= 1; tile_msb >>= 1;
// Now we know the location and NES pixel value for a specific location
// in the pattern table, we can translate that to a screen colour, and an
// (x,y) location in the sprite
data[nTileX * 8 + (7 - col) + (nTileY * 8 + row)* 128] = ppu_get_color_from_palette_ram(ppu, palette, pixel);
}
}
}
}
}
<file_sep>/Utils/include/utils/utils.h
#ifndef __UTILS_H__
#define __UTILS_H__
#ifdef UTILS_EXPORTS
#define UTILS_API __declspec(dllexport)
#else
#define UTILS_API __declspec(dllimport)
#endif
#endif
<file_sep>/Emulator.core/include/emulator/car/core/cartridge.h
#ifndef __DLL_EMULATOR_CAR_CORE_CARTRIDGE_H__
#define __DLL_EMULATOR_CAR_CORE_CARTRIDGE_H__
#include <emulator/emulator_core.h>
#include <emulator/car/mappers/mapperProvider.h>
typedef struct cartridge Cartridge;
EMULATOR_API Cartridge* cartridge_load_from_file(const char* file, car_mapper_provider provider);
EMULATOR_API void cartridge_destroy(Cartridge* cartridge);
#endif<file_sep>/Emulator.Mapper0/src/emulator/car/mappers/Mapper0.h
#ifndef __EMULATOR_CAR_MAPPERS_MAPPER_0_H__
#define __EMULATOR_CAR_MAPPERS_MAPPER_0_H__
#include <emulator/car/mappers/mapper0.h>
#include <utils/boolean.h>
bool mapper_0_init(Mapper* mapper);
void mapper_0_destroy(Mapper* mapper);
uint32_t mapper_0_prg_read(Mapper* mapper, uint16_t addr);
uint32_t mapper_0_prg_write(Mapper* mapper, uint16_t addr);
uint32_t mapper_0_chr_read(Mapper* mapper, uint16_t addr);
uint32_t mapper_0_chr_write(Mapper* mapper, uint16_t addr);
#endif<file_sep>/Utils/src/utils/Boolean.h
#ifndef __UTILS_BOOLEAN_H__
#define __UTILS_BOOLEAN_H__
#include <utils/boolean.h>
#endif
<file_sep>/Emulator.core/src/emulator/ppu/bus/PPU_Bus.h
#ifndef __EMULATOR_PPU_BUS_H__
#define __EMULATOR_PPU_BUS_H__
#include "../../car/core/Cartridge.h"
#include "TblName.h"
typedef struct ppu_bus
{
Cartridge** car;
Tbl_Name* tbl_name;
uint8_t tblPalette[32];
union mask_u
{
struct
{
uint8_t grayscale : 1;
uint8_t render_background_left : 1;
uint8_t render_sprites_left : 1;
uint8_t render_background : 1;
uint8_t render_sprites : 1;
uint8_t enhance_red : 1;
uint8_t enhance_green : 1;
uint8_t enhance_blue : 1;
};
uint8_t reg;
} mask;
} PPU_Bus;
PPU_Bus* ppu_bus_init(Cartridge**car);
void ppu_bus_destroy(PPU_Bus* bus);
uint8_t ppu_bus_read(PPU_Bus* bus, uint16_t addr);
void ppu_bus_write(PPU_Bus* bus, uint16_t addr, uint8_t value);
#endif<file_sep>/Emulator.core/src/emulator/ppu/bus/PPU_Bus.c
#include "PPU_Bus.h"
#include <stdlib.h>
#include <string.h>
PPU_Bus* ppu_bus_init(Cartridge** car)
{
PPU_Bus* bus = (PPU_Bus*)malloc(sizeof(PPU_Bus));
if (bus == NULL)
return NULL;
bus->car = car;
bus->tbl_name = tbl_name_init();
if (bus->tbl_name == NULL)
{
free(bus);
return NULL;
}
memset(bus->tblPalette, 0x00, 32 * sizeof(uint8_t));
return bus;
}
void ppu_bus_destroy(PPU_Bus* bus)
{
if (bus == NULL)
return;
tbl_name_destroy(bus->tbl_name);
free(bus);
}
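// PPU address space: $0000-$1FFF pattern tables (CHR), $2000-$3EFF nametables
// (mirrored according to the cartridge), $3F00-$3FFF palette RAM with the
// $10/$14/$18/$1C mirrors folded onto $00/$04/$08/$0C.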
uint8_t ppu_bus_read(PPU_Bus* bus, uint16_t addr)
{
addr &= 0x3FFF;
if (addr >= 0x0000 && addr <= 0x1FFF)
return cartridge_read_chr(*(bus->car), addr);
if (addr >= 0x2000 && addr <= 0x3EFF)
return tbl_name_read(bus->tbl_name, addr, (*bus->car)->mirror);
if (addr >= 0x3F00 && addr <= 0x3FFF)
{
addr &= 0x001F;
if (addr == 0x0010)
addr = 0x0000;
if (addr == 0x0014)
addr = 0x0004;
if (addr == 0x0018)
addr = 0x0008;
if (addr == 0x001C)
addr = 0x000C;
		return bus->tblPalette[addr] & (bus->mask.grayscale ? 0x30 : 0x3F);
}
return 0x00;
}
void ppu_bus_write(PPU_Bus* bus, uint16_t addr, uint8_t value)
{
addr &= 0x3FFF;
if (addr >= 0x0000 && addr <= 0x1FFF)
cartridge_write_chr(*(bus->car), addr, value);
if (addr >= 0x2000 && addr <= 0x3EFF)
tbl_name_write(bus->tbl_name, addr, value, (*bus->car)->mirror);
if (addr >= 0x3F00 && addr <= 0x3FFF)
{
addr &= 0x001F;
if (addr == 0x0010)
addr = 0x0000;
if (addr == 0x0014)
addr = 0x0004;
if (addr == 0x0018)
addr = 0x0008;
if (addr == 0x001C)
addr = 0x000C;
bus->tblPalette[addr] = value;
}
}<file_sep>/Emulator.MapperProvider/include/emulator/car/mappers/mapperDllProvider.h
#ifndef __DLL_EMULATOR_MAPPER_DLL_PROVIDER__
#define __DLL_EMULATOR_MAPPER_DLL_PROVIDER__
#ifndef EMULATOR_MAPPER_DLL_PROVIDER_API
#ifdef EMULATORMAPPERPROVIDER_EXPORTS
#define EMULATOR_MAPPER_DLL_PROVIDER_API __declspec(dllexport)
#else
#define EMULATOR_MAPPER_DLL_PROVIDER_API __declspec(dllimport)
#endif
#endif
#include <emulator/car/mappers/mapperProvider.h>
EMULATOR_MAPPER_DLL_PROVIDER_API Mapper* mapper_dll_provider(uint16_t mapper_id);
#endif<file_sep>/Emulator.core/src/emulator/cpu/core/CPU.c
#include "CPU.h"
#include "instructions/cpu_instructions.h"
#include <stdlib.h>
#include <string.h>
CPU* cpu_init()
{
CPU* cpu = (CPU*)malloc(sizeof(CPU));
if (cpu == NULL)
return NULL;
memset(cpu, 0x00, sizeof(CPU));
cpu->registery = registery_init();
if (cpu->registery == NULL)
{
free(cpu);
return NULL;
}
cpu->bus = bus_init();
if (cpu->bus == NULL) {
registery_destroy(cpu->registery);
free(cpu);
return NULL;
}
cpu->registery->sp = STACK_POINTER_INIT;
return cpu;
}
void cpu_destroy(CPU* cpu)
{
if (cpu == NULL)
return;
bus_destroy(cpu->bus);
registery_destroy(cpu->registery);
free(cpu);
}
void cpu_status_set(const CPU* cpu, flag flag, bool value)
{
status_set(cpu->registery, flag, value);
}
bool cpu_status_get(const CPU* cpu, flag flag)
{
return status_get(cpu->registery, flag);
}
uint8_t cpu_bus_read(const CPU* cpu, uint16_t address)
{
return bus_read(cpu->bus, address);
}
void cpu_bus_write(const CPU* cpu, uint16_t address, uint8_t value)
{
bus_write(cpu->bus, address, value);
}
uint8_t cpu_bus_read_pc(const CPU* cpu)
{
uint8_t value = cpu_bus_read(cpu, cpu->registery->pc);
cpu->registery->pc++;
return value;
}
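// The 6502 stack lives in page one ($0100-$01FF) and grows downwards; the
// stack pointer always refers to the next free byte.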
void cpu_stack_push(const CPU* cpu, uint8_t value)
{
cpu_bus_write(cpu, STACK_POSITION_START + cpu->registery->sp, value);
cpu->registery->sp--;
}
uint8_t cpu_stack_pop(const CPU* cpu)
{
cpu->registery->sp++;
uint8_t value = cpu_bus_read(cpu, STACK_POSITION_START + cpu->registery->sp);
return value;
}
// Reset Interrupt - Forces CPU into known state
//
// in cpu, the cpu to send the reset interrupt
void cpu_signal_reset(CPU* cpu)
{
cpu->addr_abs = RESET_ADDRESS;
uint16_t lo = cpu_bus_read(cpu, cpu->addr_abs);
uint16_t hi = cpu_bus_read(cpu, cpu->addr_abs + 1);
cpu->registery->pc = (hi << 8) | lo;
cpu->registery->a = 0x00;
cpu->registery->x = 0x00;
cpu->registery->y = 0x00;
cpu->registery->sp = STACK_POINTER_INIT;
cpu->registery->s = 0x00 | FLAG_U;
cpu->addr_rel = 0x0000;
cpu->addr_abs = 0x0000;
cpu->alu = 0x00;
cpu->cycles = 0x00;
}
// Interrupt Request - Executes an instruction at a specific location
//
// in cpu, the cpu to send the interrupt request
void cpu_signal_irq(CPU* cpu)
{
if (cpu_status_get(cpu, FLAG_I) == TRUE)
return;
cpu_stack_push(cpu, (cpu->registery->pc >> 8) & 0x00ff);
cpu_stack_push(cpu, cpu->registery->pc & 0x00ff);
cpu_status_set(cpu, FLAG_B, FALSE);
cpu_status_set(cpu, FLAG_U, TRUE);
cpu_status_set(cpu, FLAG_I, TRUE);
cpu_stack_push(cpu, cpu->registery->s);
cpu->addr_abs = IRQ_ADDRESS;
uint16_t lo = cpu_bus_read(cpu, cpu->addr_abs);
uint16_t hi = cpu_bus_read(cpu, cpu->addr_abs + 1);
cpu->registery->pc = (hi << 8) | lo;
cpu->cycles = 7;
}
// Non-Maskable Interrupt Request - Executes an instruction at a specific
// location, but cannot be disabled
//
// in cpu, the cpu to send the interrupt request
void cpu_signal_nmi(CPU * cpu)
{
cpu_stack_push(cpu, (cpu->registery->pc >> 8) & 0x00ff);
cpu_stack_push(cpu, cpu->registery->pc & 0x00ff);
cpu_status_set(cpu, FLAG_B, FALSE);
cpu_status_set(cpu, FLAG_U, TRUE);
cpu_status_set(cpu, FLAG_I, TRUE);
cpu_stack_push(cpu, cpu->registery->s);
cpu->addr_abs = NMI_ADDRESS;
uint16_t lo = cpu_bus_read(cpu, cpu->addr_abs);
uint16_t hi = cpu_bus_read(cpu, cpu->addr_abs + 1);
cpu->registery->pc = (hi << 8) | lo;
cpu->cycles = 8;
}
// Perform one clock cycle's worth of update
//
// in cpu, the cpu to tick
void cpu_clock(CPU* cpu)
{
#ifdef COUNT_CYCLES
cpu->clock_count++;
#endif
if (cpu_clock_complete(cpu) == FALSE) {
cpu->cycles--;
return;
}
cpu->opt_code = cpu_bus_read_pc(cpu);
cpu_status_set(cpu, FLAG_U, TRUE);
cpu->cycles = lookup[cpu->opt_code].cycles;
uint8_t extra_addr = (*lookup[cpu->opt_code].addr_mode)(cpu);
uint8_t extra_oper = (*lookup[cpu->opt_code].operation)(cpu);
cpu->cycles += extra_addr & extra_oper;
cpu_status_set(cpu, FLAG_U, TRUE);
cpu->cycles--;
}
bool cpu_clock_complete(const CPU * cpu)
{
if (cpu->cycles == 0)
return TRUE;
return FALSE;
}
<file_sep>/Emulator.core/include/emulator/emulator.h
#ifndef __EMULATOR_H__
#define __EMULATOR_H__
#include "emulator_core.h"
#include <utils/boolean.h>
#include <inttypes.h>
typedef struct cartridge Cartridge;
typedef struct emulator Emulator;
EMULATOR_API Emulator* emulator_init();
EMULATOR_API void emulator_destroy(Emulator* emulator);
EMULATOR_API Cartridge* emulator_insert_cartridge(Emulator* emulator, Cartridge* car);
EMULATOR_API Cartridge* emulator_remove_cartridge(Emulator* emulator);
EMULATOR_API void emulator_signal_reset(Emulator* emulator);
EMULATOR_API void emulator_tick(Emulator* emulator);
EMULATOR_API uint8_t emulator_get_cpu_bus(Emulator* emulator, uint16_t addr);
EMULATOR_API uint8_t emulator_get_ppu_bus(Emulator* emulator, uint16_t addr);
EMULATOR_API uint16_t emulator_get_pc(Emulator* emulator);
EMULATOR_API uint8_t emulator_get_cpu_reg_a(Emulator* emulator);
EMULATOR_API uint8_t emulator_get_cpu_reg_x(Emulator* emulator);
EMULATOR_API uint8_t emulator_get_cpu_reg_y(Emulator* emulator);
EMULATOR_API uint8_t emulator_get_cpu_reg_sp(Emulator* emulator);
EMULATOR_API void emulator_set_pc(Emulator* emulator, uint16_t pc);
EMULATOR_API void emulator_get_screen(Emulator* emulator, uint32_t* screen, size_t size);
EMULATOR_API uint8_t emulator_get_screen_pixel(Emulator* emulator, int x, int y);
EMULATOR_API bool emulator_is_screen_ready(Emulator* emulator);
EMULATOR_API void emulator_get_pattern_table(Emulator* emulator, uint8_t i, uint8_t palette, uint32_t* data);
EMULATOR_API void emulator_get_palette_table(Emulator* emulator, uint8_t palette, uint32_t* data);
#endif<file_sep>/Emulator.core/src/emulator/Emulator.h
#ifndef __EMULATOR_EMULATOR_H__
#define __EMULATOR_EMULATOR_H__
#include "cpu/core/CPU.h"
struct emulator
{
CPU* cpu;
uint8_t cpu_timer;
};
#include <emulator/emulator.h>
#endif<file_sep>/Emulator.core/src/emulator/ppu/core/PPU.h
#ifndef __EMULATOR_PPU_CORE_PPU_H__
#define __EMULATOR_PPU_CORE_PPU_H__
#include <inttypes.h>
#include "../../car/core/Cartridge.h"
#include "../bus/PPU_Bus.h"
typedef union loopy_register_u
{
// Source OneLoneCoder
// Credit to Loopy for working this out
struct
{
uint16_t coarse_x : 5;
uint16_t coarse_y : 5;
uint16_t nametable_x : 1;
uint16_t nametable_y : 1;
uint16_t fine_y : 3;
uint16_t unused : 1;
};
uint16_t reg;
} loopy_register;
typedef struct ppu {
PPU_Bus* bus;
Cartridge ** car;
//uint8_t tblPattern[2][4096];
int16_t scanline;
int16_t cycle;
bool frame;
union status_u
{
struct {
uint8_t unused : 5;
uint8_t sprite_overflow : 1;
uint8_t sprite_zero_hit : 1;
uint8_t vertical_blank : 1;
};
uint8_t reg;
} status;
union PPUCTRL
{
struct
{
uint8_t nametable_x : 1;
uint8_t nametable_y : 1;
uint8_t increment_mode : 1;
uint8_t pattern_sprite : 1;
uint8_t pattern_background : 1;
uint8_t sprite_size : 1;
uint8_t slave_mode : 1; // unused
uint8_t enable_nmi : 1;
};
uint8_t reg;
} control;
loopy_register vram_addr;
loopy_register tram_addr;
uint8_t fine_x;
uint8_t address_latch;
uint8_t ppu_data_buffer;
uint8_t bg_next_tile_id;
uint8_t bg_next_tile_attrib;
uint8_t bg_next_tile_lsb;
uint8_t bg_next_tile_msb;
uint16_t bg_shifter_pattern_lo;
uint16_t bg_shifter_pattern_hi;
uint16_t bg_shifter_attrib_lo;
uint16_t bg_shifter_attrib_hi;
bool nmi;
uint32_t* screen;
} PPU;
PPU* ppu_init(Cartridge** car);
void ppu_destroy(PPU* ppu);
uint8_t ppu_read(PPU* ppu, uint16_t addr);
void ppu_write(PPU* ppu, uint16_t addr, uint8_t value);
void ppu_tick(PPU* ppu);
void ppu_update_shifters(PPU* ppu);
void ppu_load_background_shifters(PPU* ppu);
void ppu_increment_scroll_x(PPU* ppu);
void ppu_increment_scroll_y(PPU* ppu);
void ppu_transfer_address_x(PPU* ppu);
void ppu_transfer_address_y(PPU* ppu);
void ppu_set_pixel(PPU* ppu, uint16_t x, uint16_t y, uint32_t color);
uint32_t ppu_get_color_from_palette_ram(PPU* ppu, uint8_t palette, uint8_t pixel);
void ppu_get_palette_table(PPU* ppu, uint8_t palette, uint32_t* data);
void ppu_get_pattern_table(PPU* ppu, uint8_t i, uint8_t palette, uint32_t* data);
#endif<file_sep>/Emulator.core/src/emulator/cpu/core/CPU.h
#ifndef __EMULATOR_CPU_CORE_CPU_H__
#define __EMULATOR_CPU_CORE_CPU_H__
#include "Registery.h"
#include "settings.h"
#include "../bus/Bus.h"
typedef struct cpu {
Registery * registery;
uint8_t alu; // Represents the working input value to the ALU
uint16_t temp; // A convenience variable used everywhere
uint16_t addr_abs; // All used memory addresses end up in here
uint16_t addr_rel; // Represents absolute address following a branch
uint8_t opt_code; // Is the instruction byte
uint8_t cycles; // Counts how many cycles the instruction has remaining
#ifdef COUNT_CYCLES
uint64_t clock_count; // global accumulation of the number of clocks
#endif
Bus* bus;
} CPU;
CPU* cpu_init();
void cpu_destroy(CPU* cpu);
void cpu_status_set(const CPU* cpu, flag flag, bool value);
bool cpu_status_get(const CPU* cpu, flag flag);
uint8_t cpu_bus_read(const CPU* cpu, uint16_t address);
void cpu_bus_write(const CPU* cpu, uint16_t address, uint8_t value);
uint8_t cpu_bus_read_pc(const CPU* cpu);
void cpu_stack_push(const CPU* cpu, uint8_t value);
uint8_t cpu_stack_pop(const CPU* cpu);
// Reset Interrupt - Forces CPU into known state
//
// in cpu, the cpu to send the reset interrupt
void cpu_signal_reset(CPU* cpu);
// Interrupt Request - Executes an instruction at a specific location
//
// in cpu, the cpu to send the interrupt request
void cpu_signal_irq(CPU* cpu);
// Non-Maskable Interrupt Request - Executes an instruction at a specific
// location, but cannot be disabled
//
// in cpu, the cpu to send the interrupt request
void cpu_signal_nmi(CPU* cpu);
// Perform one clock cycle's worth of update
//
// in cpu, the cpu to tick
void cpu_clock(CPU* cpu);
bool cpu_clock_complete(const CPU* cpu);
#endif
<file_sep>/Emulator.core/src/emulator/car/core/CartridgeHeader.h
#ifndef __EMULATOR_CAR_CORE_CARTRIDGE_HEADER_H__
#define __EMULATOR_CAR_CORE_CARTRIDGE_HEADER_H__
#include <inttypes.h>
typedef struct cartridge_header {
char name[4];
uint8_t size_prg_rom;
uint8_t size_chr_rom;
uint8_t mapper_1;
uint8_t mapper_2;
uint8_t size_prg_ram;
uint8_t tv_system_1;
uint8_t tv_system_2;
uint8_t unused_1;
uint8_t unused_2;
uint8_t unused_3;
uint8_t unused_4;
uint8_t unused_5;
} CartridgeHeader;
#endif<file_sep>/Emulator.core/src/main.c
#include "emulator/cpu/core/CPU.h"
int main(int argc, char** argv)
{
CPU* cpu = cpu_init();
cpu_destroy(cpu);
return cpu != NULL ? 0 : 1;
}<file_sep>/Emulator.core/src/emulator/cpu/core/settings.h
#ifndef __EMULATOR_CPU_CORE_SETTINGS_H__
#define __EMULATOR_CPU_CORE_SETTINGS_H__
#define INCLUDE_HARDWARE_BUG_IND
#define RAM_START 0x0000
#define RAM_END 0x07ff
#define RAM_CLONE_START 0x0800
#define RAM_CLONE_END 0x1fff
#define PPU_START 0x2000
#define PPU_END 0x2007
#define PPU_CLONE_START 0x2008
#define PPU_CLONE_END 0x3fff
#define APU_START 0x4000
#define APU_END 0x4017
#define DIS_START 0x4018
#define DIS_END 0x401f
#define ROM_START 0x4020
#define ROM_END 0xffff
#define CAR_PRG_START 0x8000
#define CAR_PRG_END 0xffff
#define RESET_ADDRESS 0xFFFC // the address where the pc will be fetched after a reset signal
#define IRQ_ADDRESS 0xFFFE // the address where the pc will be fetched after a irq signal
#define NMI_ADDRESS 0xFFFA // the address where the pc will be fetched after a nmi signal
#define STACK_POINTER_INIT 0x00FD // the stack pointer initial value
#define STACK_POSITION_START 0x0100 // the starting position of the stack
#endif<file_sep>/Emulator.MapperProvider/src/emulator/car/mappers/MapperDllProvider.h
#ifndef __EMULATOR_CAR_MAPPERS_MAPPER_DLL_PROVIDER_H__
#define __EMULATOR_CAR_MAPPERS_MAPPER_DLL_PROVIDER_H__
#include <emulator/car/mappers/mapperDllProvider.h>
#endif<file_sep>/Emulator.core/src/emulator/cpu/bus/Bus.h
#ifndef __EMULATOR_CPU_BUS_BUS_H__
#define __EMULATOR_CPU_BUS_BUS_H__
#include "../../ppu/core/PPU.h"
#include "../../car/core/Cartridge.h"
#include <inttypes.h>
typedef struct bus {
uint8_t* ram;
PPU* ppu;
Cartridge* car;
} Bus;
Bus* bus_init();
uint8_t bus_read(const Bus* bus, uint16_t addr);
void bus_write(const Bus* bus, uint16_t addr, uint8_t value);
Cartridge* bus_destroy(Bus* bus);
Cartridge* bus_insert_cartridge(Bus* bus, Cartridge* car);
Cartridge* bus_remove_cartridge(Bus* bus);
void bus_tick(Bus* bus);
#endif
<file_sep>/Emulator.Mapper0/include/emulator/car/mappers/mapper0.h
#ifndef __DLL_MAPPER0__
#define __DLL_MAPPER0__
#ifndef EMULATOR_MAPPER0_API
#ifdef EMULATORMAPPER0_EXPORTS
#define EMULATOR_MAPPER0_API __declspec(dllexport)
#else
#define EMULATOR_MAPPER0_API __declspec(dllimport)
#endif
#endif
#include <emulator/car/mappers/mapperProvider.h>
EMULATOR_MAPPER0_API Mapper* mapper_0_provider(uint16_t mapper_id);
#endif<file_sep>/Emulator.core/src/emulator/cpu/core/instructions/addressing.c
#include "addressing.h"
#include "../settings.h"
// Address Mode: Implied
// There is no additional data required for this instruction. The instruction
// does something very simple like setting a status bit. However, we will
// target the accumulator, for instructions like PHA
uint8_t cpu_instruction_addressing_imp(CPU* core)
{
core->alu = core->registery->a;
return 0;
}
// Address Mode: Immediate
// The instruction expects the next byte to be used as a value, so we'll prep
// the read address to point to the next byte
uint8_t cpu_instruction_addressing_imm(CPU* core)
{
core->addr_abs = core->registery->pc++;
return 0;
}
// Address Mode: Zero Page
// To save program bytes, zero page addressing allows you to absolutely address
// a location in first 0xFF bytes of address range. Clearly this only requires
// one byte instead of the usual two.
uint8_t cpu_instruction_addressing_zp0(CPU* core)
{
core->addr_abs = cpu_bus_read_pc(core);
core->addr_abs &= 0x00ff;
return 0;
}
// Address Mode: Zero Page with X Offset
// Fundamentally the same as Zero Page addressing, but the contents of the X Register
// is added to the supplied single byte address. This is useful for iterating through
// ranges within the first page.
uint8_t cpu_instruction_addressing_zpx(CPU* core)
{
core->addr_abs = cpu_bus_read_pc(core);
core->addr_abs += core->registery->x;
core->addr_abs &= 0x00ff;
return 0;
}
// Address Mode: Zero Page with Y Offset
// Fundamentally the same as Zero Page addressing, but the contents of the Y Register
// is added to the supplied single byte address. This is useful for iterating through
// ranges within the first page.
uint8_t cpu_instruction_addressing_zpy(CPU* core)
{
core->addr_abs = cpu_bus_read_pc(core);
core->addr_abs += core->registery->y;
core->addr_abs &= 0x00ff;
return 0;
}
// Address Mode: Relative
// This address mode is exclusive to branch instructions. The address
// must reside within -128 to +127 of the branch instruction, i.e.
// you can't directly branch to any address in the addressable range.
uint8_t cpu_instruction_addressing_rel(CPU* core)
{
core->addr_rel = cpu_bus_read_pc(core);
if (core->addr_rel & 0x80)
core->addr_rel |= 0xff00;
return 0;
}
// Address Mode: Absolute
// A full 16-bit address is loaded and used
uint8_t cpu_instruction_addressing_abs(CPU* core)
{
uint16_t lo = cpu_bus_read_pc(core);
uint16_t hi = cpu_bus_read_pc(core);
core->addr_abs = (hi << 8) | lo;
return 0;
}
// Address Mode: Absolute with X Offset
// Fundamentally the same as absolute addressing, but the contents of the X Register
// is added to the supplied two byte address. If the resulting address changes
// the page, an additional clock cycle is required
uint8_t cpu_instruction_addressing_abx(CPU* core)
{
uint16_t lo = cpu_bus_read_pc(core);
uint16_t hi = cpu_bus_read_pc(core);
core->addr_abs = (hi << 8) | lo;
core->addr_abs += core->registery->x;
if ((core->addr_abs & 0xff00) != (hi << 8))
return 1;
return 0;
}
// Address Mode: Absolute with Y Offset
// Fundamentally the same as absolute addressing, but the contents of the Y Register
// is added to the supplied two byte address. If the resulting address changes
// the page, an additional clock cycle is required
uint8_t cpu_instruction_addressing_aby(CPU* core)
{
uint16_t lo = cpu_bus_read_pc(core);
uint16_t hi = cpu_bus_read_pc(core);
core->addr_abs = (hi << 8) | lo;
core->addr_abs += core->registery->y;
if ((core->addr_abs & 0xff00) != (hi << 8))
return 1;
return 0;
}
// Address Mode: Indirect
// The supplied 16-bit address is read to get the actual 16-bit address. This
// instruction is unusual in that it has a bug in the hardware! To emulate its
// function accurately, we also need to emulate this bug. If the low byte of the
// supplied address is 0xFF, then to read the high byte of the actual address
// we need to cross a page boundary. This doesn't actually work on the chip as
// designed; instead it wraps back around in the same page, yielding an
// invalid actual address
uint8_t cpu_instruction_addressing_ind(CPU* core)
{
uint16_t ptr_lo = cpu_bus_read_pc(core);
uint16_t ptr_hi = cpu_bus_read_pc(core);
uint16_t ptr = (ptr_hi << 8) | ptr_lo;
uint16_t addr_lo = cpu_bus_read(core, ptr);
#ifdef INCLUDE_HARDWARE_BUG_IND
if (ptr_lo == 0x00ff)
ptr &= 0xff00;
else
ptr++;
#else
ptr++;
#endif
	uint16_t addr_hi = cpu_bus_read(core, ptr);
core->addr_abs = (addr_hi << 8) | addr_lo;
return 0;
}
// Address Mode: Indirect X
// The supplied 8-bit address is offset by X Register to index
// a location in page 0x00. The actual 16-bit address is read
// from this location
uint8_t cpu_instruction_addressing_izx(CPU* core)
{
uint16_t ptr = cpu_bus_read_pc(core);
ptr += core->registery->x;
uint16_t lo = cpu_bus_read(core, ptr & 0x00ff);
ptr++;
uint16_t hi = cpu_bus_read(core, ptr & 0x00ff);
core->addr_abs = (hi << 8) | lo;
return 0;
}
// Address Mode: Indirect Y
// The supplied 8-bit address indexes a location in page 0x00. From
// here the actual 16-bit address is read, and the contents of
// Y Register is added to it to offset it. If the offset causes a
// change in page then an additional clock cycle is required.
uint8_t cpu_instruction_addressing_izy(CPU* core)
{
uint16_t ptr = cpu_bus_read_pc(core);
uint16_t lo = cpu_bus_read(core, ptr & 0x00ff);
ptr++;
uint16_t hi = cpu_bus_read(core, ptr & 0x00ff);
core->addr_abs = (hi << 8) | lo;
core->addr_abs += core->registery->y;
if ((core->addr_abs & 0xff00) != (hi << 8))
return 1;
return 0;
}
<file_sep>/Emulator.Mapper0/src/emulator/car/mappers/Mapper0.c
#include "Mapper0.h"
#include <emulator/car/mappers/mapper.h>
#include <stdlib.h>
Mapper* mapper_0_provider(uint16_t mapper_id)
{
if (mapper_id != 0)
return NULL;
Mapper* mapper = (Mapper*)malloc(sizeof(Mapper));
if (mapper == NULL)
return NULL;
mapper->type = mapper_id;
mapper->init = &mapper_0_init;
mapper->destroy = &mapper_0_destroy;
mapper->prg_read = &mapper_0_prg_read;
mapper->prg_write = &mapper_0_prg_write;
mapper->chr_read = &mapper_0_chr_read;
mapper->chr_write = &mapper_0_chr_write;
return mapper;
}
bool mapper_0_init(Mapper* mapper)
{
if (mapper->prg_banks < 1 || mapper->prg_banks > 2)
return FALSE;
return TRUE;
}
void mapper_0_destroy(Mapper* mapper)
{
}
uint32_t mapper_0_prg_read(Mapper* mapper, uint16_t addr)
{
if (mapper->prg_banks == 2 || addr < PRG_BANK_SIZE)
return addr;
return addr - PRG_BANK_SIZE;
}
uint32_t mapper_0_prg_write(Mapper* mapper, uint16_t addr)
{
if (mapper->prg_banks == 2 || addr < PRG_BANK_SIZE)
return addr;
return addr - PRG_BANK_SIZE;
}
uint32_t mapper_0_chr_read(Mapper* mapper, uint16_t addr)
{
return addr;
}
uint32_t mapper_0_chr_write(Mapper* mapper, uint16_t addr)
{
return addr;
}
<file_sep>/Emulator.core/src/emulator/cpu/core/Flags.c
#include "Flags.h"
bool flag_read(flags flags, flag flag)
{
return is_true(flags & flag);
}
void flag_write(flags* flags, flag flag, bool value)
{
if (is_true(value))
(*flags) |= flag;
else
(*flags) &= ~flag;
}
void flag_set_active(flags* flags, flag flag)
{
flag_write(flags, flag, TRUE);
}
void flag_set_inactive(flags* flags, flag flag)
{
flag_write(flags, flag, FALSE);
}<file_sep>/Emulator.core/src/emulator/car/core/Cartridge.c
#include "Cartridge.h"
#include <stdlib.h>
#include <string.h>
#include <stdio.h>
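// Loads an iNES (.nes) image: 16-byte header, optional 512-byte trainer, then
// the PRG and CHR banks sized by the header. The mapper implementation is
// resolved through the supplied provider; only file type 1 is handled here.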
Cartridge* cartridge_load_from_file(const char* file, car_mapper_provider provider)
{
FILE* f;
errno_t err = fopen_s(&f, file, "rb");
if (f == NULL || err != 0)
{
#ifdef _DEBUG
printf("Error, couldn't open the file : %s\n", file);
#endif
return NULL;
}
Cartridge* cartridge = (Cartridge*)malloc(sizeof(Cartridge));
if (cartridge == NULL)
{
#ifdef _DEBUG
printf("Error, couldn't allocate memory for the cartridge\n");
#endif
fclose(f);
return NULL;
}
cartridge->mirror = CARTRIDGE_MIRROR_HORIZONTAL;
fread_s(&(cartridge->header), sizeof(CartridgeHeader), sizeof(uint8_t), 16, f);
if (cartridge->header.mapper_1 & 0x04)
fread_s(cartridge->trainer, sizeof(uint8_t) * 512, sizeof(uint8_t), 512, f);
else
memset(cartridge->trainer, 0x00, sizeof(uint8_t) * 512);
cartridge->mapper_id = ((cartridge->header.mapper_2 >> 4) << 4) | (cartridge->header.mapper_1 >> 4);
#ifdef _DEBUG
printf("PRG : %u CHR : %u\n", cartridge->header.size_prg_rom, cartridge->header.size_chr_rom);
#endif
uint8_t nFileType = 1;
uint16_t size_prg, size_chr;
switch (nFileType)
{
case 1:
size_prg = cartridge->header.size_prg_rom * PRG_BANK_SIZE;
size_chr = cartridge->header.size_chr_rom * CHR_BANK_SIZE;
cartridge->prg = (uint8_t*)malloc(sizeof(uint8_t) * size_prg);
if (cartridge->prg == NULL)
{
free(cartridge);
fclose(f);
#ifdef _DEBUG
printf("Error, couldn't allocate memory for the cartridge PGR\n");
#endif
return NULL;
}
cartridge->chr = (uint8_t*)malloc(sizeof(uint8_t) * size_chr);
if (cartridge->chr == NULL)
{
free(cartridge->prg);
free(cartridge);
fclose(f);
#ifdef _DEBUG
printf("Error, couldn't allocate memory for the cartridge CHR\n");
#endif
return NULL;
}
fread_s(cartridge->prg, sizeof(uint8_t) * size_prg, sizeof(uint8_t), size_prg, f);
fread_s(cartridge->chr, sizeof(uint8_t) * size_chr, sizeof(uint8_t), size_chr, f);
break;
default:
free(cartridge);
fclose(f);
#ifdef _DEBUG
printf("Error, incorrect file type : %u\n", nFileType);
#endif
return NULL;
}
fclose(f);
cartridge->mapper = (*provider)(cartridge->mapper_id);
if (cartridge->mapper != NULL)
{
cartridge->mapper->chr_banks = cartridge->header.size_chr_rom;
cartridge->mapper->prg_banks = cartridge->header.size_prg_rom;
if (is_false((*cartridge->mapper->init)(cartridge->mapper)))
{
#ifdef _DEBUG
printf("Error, couldn't initialize mapper %u\n", cartridge->mapper_id);
#endif
free(cartridge->mapper);
cartridge->mapper = NULL;
}
}
if (cartridge->mapper == NULL)
{
#ifdef _DEBUG
printf("Error, couldn't load mapper %u\n", cartridge->mapper_id);
#endif
free(cartridge->chr);
free(cartridge->prg);
free(cartridge);
return NULL;
}
return cartridge;
}
void cartridge_destroy(Cartridge* cartridge)
{
if (cartridge == NULL)
return;
if (cartridge->mapper != NULL)
{
(*cartridge->mapper->destroy)(cartridge->mapper);
free(cartridge->mapper);
}
free(cartridge->chr);
free(cartridge->prg);
cartridge->mapper = NULL;
cartridge->chr = NULL;
cartridge->prg = NULL;
free(cartridge);
}
uint8_t cartridge_read_prg(Cartridge* cartridge, uint16_t addr)
{
uint32_t realAddr = (*cartridge->mapper->prg_read)(cartridge->mapper, addr);
if (realAddr < UINT32_MAX)
return cartridge->prg[realAddr];
return 0x00;
}
void cartridge_write_prg(Cartridge* cartridge, uint16_t addr, uint8_t value)
{
uint32_t realAddr = (*cartridge->mapper->prg_write)(cartridge->mapper, addr);
if (realAddr < UINT32_MAX)
cartridge->prg[realAddr] = value;
}
uint8_t cartridge_read_chr(Cartridge* cartridge, uint16_t addr)
{
uint32_t realAddr = (*cartridge->mapper->chr_read)(cartridge->mapper, addr);
if (realAddr < UINT32_MAX)
return cartridge->chr[realAddr];
return 0x00;
}
void cartridge_write_chr(Cartridge* cartridge, uint16_t addr, uint8_t value)
{
uint32_t realAddr = (*cartridge->mapper->chr_write)(cartridge->mapper, addr);
if (realAddr < UINT32_MAX)
cartridge->chr[realAddr] = value;
}<file_sep>/Utils/src/utils/Boolean.c
#include "Boolean.h"
bool is_true(bool b) {
return b != FALSE;
}
bool is_false(bool b) {
return !is_true(b);
}<file_sep>/Utils/include/utils/boolean.h
#ifndef __UTILS_CORE_BOOLEAN_H__
#define __UTILS_CORE_BOOLEAN_H__
#include <inttypes.h>
#include "utils.h"
#ifndef bool
typedef uint8_t bool;
#endif
#define TRUE 0x01
#define FALSE 0x00
UTILS_API bool is_true(bool b);
UTILS_API bool is_false(bool b);
#endif
<file_sep>/Emulator.core/src/emulator/cpu/bus/Bus.c
#include "Bus.h"
#include <stdlib.h>
#include <string.h>
#include "../core/settings.h"
Bus* bus_init()
{
Bus* bus = (Bus*)malloc(sizeof(Bus));
if (bus == NULL)
return NULL;
memset(bus, 0x00, sizeof(Bus));
size_t ram_size = RAM_END - RAM_START + 1;
bus->ram = (uint8_t*)malloc(sizeof(uint8_t) * ram_size);
if (bus->ram == NULL)
{
free(bus);
return NULL;
}
memset(bus->ram, 0x00, sizeof(uint8_t)* ram_size);
bus->car = NULL;
bus->ppu = ppu_init(&(bus->car));
if (bus->ppu == NULL)
{
free(bus->ram);
free(bus);
return NULL;
}
return bus;
}
uint8_t bus_read(const Bus* bus, uint16_t addr)
{
if (addr >= RAM_START && addr <= RAM_END)
return bus->ram[addr - RAM_START];
if (addr >= RAM_CLONE_START && addr <= RAM_CLONE_END)
return bus_read(bus, (addr & (RAM_END - RAM_START)) + RAM_START);
if (addr >= PPU_START && addr <= PPU_END)
return ppu_read(bus->ppu, addr - PPU_START);
if (addr >= PPU_CLONE_START && addr <= PPU_CLONE_END)
		return bus_read(bus, (addr & 0x0007) + PPU_START);
if (addr >= CAR_PRG_START && addr <= CAR_PRG_END)
return cartridge_read_prg(bus->car, addr - CAR_PRG_START);
return 0x00;
}
void bus_write(const Bus* bus, uint16_t addr, uint8_t value)
{
if (addr >= RAM_START && addr <= RAM_END)
bus->ram[addr - RAM_START] = value;
else if (addr >= RAM_CLONE_START && addr <= RAM_CLONE_END)
bus_write(bus, (addr & (RAM_END - RAM_START)) + RAM_START, value);
else if (addr >= PPU_START && addr <= PPU_END)
ppu_write(bus->ppu, addr - PPU_START, value);
else if (addr >= PPU_CLONE_START && addr <= PPU_CLONE_END)
		bus_write(bus, (addr & 0x0007) + PPU_START, value);
else if (addr >= CAR_PRG_START && addr <= CAR_PRG_END)
cartridge_write_prg(bus->car, addr - CAR_PRG_START, value);
}
Cartridge* bus_destroy(Bus* bus)
{
if (bus == NULL)
		return NULL;
	Cartridge* car = bus->car;
	bus->car = NULL;
	ppu_destroy(bus->ppu);
	free(bus->ram);
free(bus);
return car;
}
Cartridge* bus_insert_cartridge(Bus* bus, Cartridge* car)
{
Cartridge* old = bus->car;
bus->car = car;
return old;
}
Cartridge* bus_remove_cartridge(Bus* bus)
{
Cartridge* old = bus->car;
bus->car = NULL;
return old;
}
void bus_tick(Bus* bus)
{
ppu_tick(bus->ppu);
}
<file_sep>/Emulator.MapperProvider/src/emulator/car/mappers/MapperDllprovider.c
#include "MapperDllProvider.h"
#include <emulator/car/mappers/mapper0.h>
Mapper* mapper_dll_provider(uint16_t mapper_id)
{
switch (mapper_id)
{
case 0:
return mapper_0_provider(mapper_id);
default:
return NULL;
}
}<file_sep>/Emulator.core/src/emulator/cpu/core/instructions/operations.h
#ifndef __EMULATOR_CPU_CORE_INSTRUCTIONS_OPERATIONS__
#define __EMULATOR_CPU_CORE_INSTRUCTIONS_OPERATIONS__
#include "../CPU.h"
// Instruction: Add with Carry In
// Function: A = A + M + C
// Flags Out: C, V, N, Z
uint8_t cpu_instruction_operation_adc(CPU* cpu);
// Instruction: Subtraction with Borrow In
// Function: A = A - M - (1 - C)
// Flags Out: C, V, N, Z
uint8_t cpu_instruction_operation_sbc(CPU* cpu);
// Instruction: Bitwise Logic AND
// Function: A = A & M
// Flags Out: N, Z
uint8_t cpu_instruction_operation_and(CPU* cpu);
// Instruction: Arithmetic Shift Left
// Function: A = C <- (A << 1) <- 0
// Flags Out: N, Z, C
uint8_t cpu_instruction_operation_asl(CPU* cpu);
// Instruction: Branch if Carry Clear
// Function: if(C == 0) pc = address
uint8_t cpu_instruction_operation_bcc(CPU* cpu);
// Instruction: Branch if Carry Set
// Function: if(C == 1) pc = address
uint8_t cpu_instruction_operation_bcs(CPU* cpu);
// Instruction: Branch if Equal
// Function: if(Z == 1) pc = address
uint8_t cpu_instruction_operation_beq(CPU* cpu);
// Instruction: Test Bits in Memory with Accumulator
// Function: A AND M, M7 -> N, M6 -> V
uint8_t cpu_instruction_operation_bit(CPU* cpu);
// Instruction: Branch if Negative
// Function: if(N == 1) pc = address
uint8_t cpu_instruction_operation_bmi(CPU* cpu);
// Instruction: Branch if Not Equal
// Function: if(Z == 0) pc = address
uint8_t cpu_instruction_operation_bne(CPU* cpu);
// Instruction: Branch if Positive
// Function: if(N == 0) pc = address
uint8_t cpu_instruction_operation_bpl(CPU* cpu);
// Instruction: Break
// Function: Program Sourced Interrupt
uint8_t cpu_instruction_operation_brk(CPU* cpu);
// Instruction: Branch if Overflow Clear
// Function: if(V == 0) pc = address
uint8_t cpu_instruction_operation_bvc(CPU* cpu);
// Instruction: Branch if Overflow Set
// Function: if(V == 1) pc = address
uint8_t cpu_instruction_operation_bvs(CPU* cpu);
// Instruction: Clear Carry Flag
// Function: C = 0
uint8_t cpu_instruction_operation_clc(CPU* cpu);
// Instruction: Clear Decimal Flag
// Function: D = 0
uint8_t cpu_instruction_operation_cld(CPU* cpu);
// Instruction: Disable Interrupts / Clear Interrupt Flag
// Function: I = 0
uint8_t cpu_instruction_operation_cli(CPU* cpu);
// Instruction: Clear Overflow Flag
// Function: V = 0
uint8_t cpu_instruction_operation_clv(CPU* cpu);
// Instruction: Compare Accumulator
// Function: C <- A >= M Z <- (A - M) == 0
// Flags Out: N, C, Z
uint8_t cpu_instruction_operation_cmp(CPU* cpu);
// Instruction: Compare X Register
// Function: C <- X >= M Z <- (X - M) == 0
// Flags Out: N, C, Z
uint8_t cpu_instruction_operation_cpx(CPU* cpu);
// Instruction: Compare Y Register
// Function: C <- Y >= M Z <- (Y - M) == 0
// Flags Out: N, C, Z
uint8_t cpu_instruction_operation_cpy(CPU* cpu);
// Instruction: Decrement Value at Memory Location
// Function: M = M - 1
// Flags Out: N, Z
uint8_t cpu_instruction_operation_dec(CPU* cpu);
// Instruction: Decrement X Register
// Function: X = X - 1
// Flags Out: N, Z
uint8_t cpu_instruction_operation_dex(CPU* cpu);
// Instruction: Decrement Y Register
// Function: Y = Y - 1
// Flags Out: N, Z
uint8_t cpu_instruction_operation_dey(CPU* cpu);
// Instruction: Bitwise Logic XOR
// Function: A = A xor M
// Flags Out: N, Z
uint8_t cpu_instruction_operation_eor(CPU* cpu);
// Instruction: Increment Value at Memory Location
// Function: M = M + 1
// Flags Out: N, Z
uint8_t cpu_instruction_operation_inc(CPU* cpu);
// Instruction: Increment X Register
// Function: X = X + 1
// Flags Out: N, Z
uint8_t cpu_instruction_operation_inx(CPU* cpu);
// Instruction: Increment Y Register
// Function: Y = Y + 1
// Flags Out: N, Z
uint8_t cpu_instruction_operation_iny(CPU* cpu);
// Instruction: Jump To Location
// Function: pc = address
uint8_t cpu_instruction_operation_jmp(CPU* cpu);
// Instruction: Jump To Sub-Routine
// Function: Push current pc to stack, pc = address
uint8_t cpu_instruction_operation_jsr(CPU* cpu);
// Instruction: Load The Accumulator
// Function: A = M
// Flags Out: N, Z
uint8_t cpu_instruction_operation_lda(CPU* cpu);
// Instruction: Load The X Register
// Function: X = M
// Flags Out: N, Z
uint8_t cpu_instruction_operation_ldx(CPU* cpu);
// Instruction: Load The Y Register
// Function: Y = M
// Flags Out: N, Z
uint8_t cpu_instruction_operation_ldy(CPU* cpu);
// Instruction: Shift One Bit Right (Memory or Accumulator)
// Function: 0 -> [76543210] -> C
uint8_t cpu_instruction_operation_lsr(CPU* cpu);
// Instruction: No Operation
uint8_t cpu_instruction_operation_nop(CPU* cpu);
// Instruction: Bitwise Logic OR
// Function: A = A | M
// Flags Out: N, Z
uint8_t cpu_instruction_operation_ora(CPU* cpu);
// Instruction: Push Accumulator to Stack
// Function: A -> stack
uint8_t cpu_instruction_operation_pha(CPU* cpu);
// Instruction: Push Status Register to Stack
// Function: status -> stack
// Note: Break flag is set to 1 before push
uint8_t cpu_instruction_operation_php(CPU* cpu);
// Instruction: Pop Accumulator off Stack
// Function: A <- stack
// Flags Out: N, Z
uint8_t cpu_instruction_operation_pla(CPU* cpu);
// Instruction: Pop Status Register off Stack
// Function: Status <- stack
uint8_t cpu_instruction_operation_plp(CPU* cpu);
// Instruction: Rotate One Bit Left (Memory or Accumulator)
// C <- [76543210] <- C
uint8_t cpu_instruction_operation_rol(CPU* cpu);
// Instruction: Rotate One Bit Right (Memory or Accumulator)
// C -> [76543210] -> C
uint8_t cpu_instruction_operation_ror(CPU* cpu);
// Instruction: Return from Interrupt
// Function: pull SR, pull PC
uint8_t cpu_instruction_operation_rti(CPU* cpu);
// Instruction: Return from Subroutine
// Function: pull PC, PC+1 -> PC
uint8_t cpu_instruction_operation_rts(CPU* cpu);
// Instruction: Set Carry Flag
// Function: C = 1
uint8_t cpu_instruction_operation_sec(CPU* cpu);
// Instruction: Set Decimal Flag
// Function: D = 1
uint8_t cpu_instruction_operation_sed(CPU* cpu);
// Instruction: Set Interrupt Flag / Enable Interrupts
// Function: I = 1
uint8_t cpu_instruction_operation_sei(CPU* cpu);
// Instruction: Store Accumulator at Address
// Function: M = A
uint8_t cpu_instruction_operation_sta(CPU* cpu);
// Instruction: Store X Register at Address
// Function: M = X
uint8_t cpu_instruction_operation_stx(CPU* cpu);
// Instruction: Store Y Register at Address
// Function: M = Y
uint8_t cpu_instruction_operation_sty(CPU* cpu);
// Instruction: Transfer Accumulator to X Register
// Function: X = A
// Flags Out: N, Z
uint8_t cpu_instruction_operation_tax(CPU* cpu);
// Instruction: Transfer Accumulator to Y Register
// Function: Y = A
// Flags Out: N, Z
uint8_t cpu_instruction_operation_tay(CPU* cpu);
// Instruction: Transfer Stack Pointer to X Register
// Function: X = stack pointer
// Flags Out: N, Z
uint8_t cpu_instruction_operation_tsx(CPU* cpu);
// Instruction: Transfer X Register to Accumulator
// Function: A = X
// Flags Out: N, Z
uint8_t cpu_instruction_operation_txa(CPU* cpu);
// Instruction: Transfer X Register to Stack Pointer
// Function: stack pointer = X
uint8_t cpu_instruction_operation_txs(CPU* cpu);
// Instruction: Transfer Y Register to Accumulator
// Function: A = Y
// Flags Out: N, Z
uint8_t cpu_instruction_operation_tya(CPU* cpu);
uint8_t cpu_instruction_operation_lax(CPU* cpu);
uint8_t cpu_instruction_operation_sax(CPU* cpu);
uint8_t cpu_instruction_operation_dcp(CPU* cpu);
uint8_t cpu_instruction_operation_isc(CPU* cpu);
uint8_t cpu_instruction_operation_rla(CPU* cpu);
uint8_t cpu_instruction_operation_rra(CPU* cpu);
uint8_t cpu_instruction_operation_slo(CPU* cpu);
uint8_t cpu_instruction_operation_sre(CPU* cpu);
// This function captures illegal opcodes
uint8_t cpu_instruction_operation_xxx(CPU* cpu);
#endif<file_sep>/Emulator.core/include/emulator/emulator_core.h
#ifndef EMULATOR_API
#ifdef EMULATOR_CORE_DLL
#define EMULATOR_API __declspec(dllexport)
#else
#define EMULATOR_API __declspec(dllimport)
#endif
#endif<file_sep>/Emulator.core/include/emulator/car/mappers/mapperProvider.h
#ifndef __DLL_EMULATOR_CAR_MAPPERS_MAPPER_PROVIDER_H__
#define __DLL_EMULATOR_CAR_MAPPERS_MAPPER_PROVIDER_H__
#include <emulator/emulator_core.h>
#include <inttypes.h>
typedef struct mapper Mapper;
typedef Mapper* (*car_mapper_provider)(uint16_t mapper_id);
#endif<file_sep>/Emulator.core/src/utils/boolean.h
#ifndef __UTILS_BOOLEAN_H__
#define __UTILS_BOOLEAN_H__
#include <inttypes.h>
#ifndef bool
typedef uint8_t bool;
#endif
#define TRUE 0x01
#define FALSE 0x00
bool is_true(bool b);
bool is_false(bool b);
#endif
<file_sep>/Emulator.core/include/emulator/car/mappers/mapper.h
#ifndef __DLL_EMULATOR_CAR_MAPPERS_MAPPER_H__
#define __DLL_EMULATOR_CAR_MAPPERS_MAPPER_H__
#include <emulator/emulator_core.h>
#include <utils/boolean.h>
#include <inttypes.h>
#define PRG_BANK_SIZE 16384
#define CHR_BANK_SIZE 8192
typedef struct mapper Mapper;
typedef bool(*car_mapper_init)(Mapper* mapper);
typedef void(*car_mapper_destroy)(Mapper* mapper);
typedef uint32_t(*car_mapper_map)(Mapper* mapper, uint16_t address);
typedef struct mapper {
uint16_t type;
uint8_t prg_banks;
uint8_t chr_banks;
void* user_ptr;
car_mapper_init init;
car_mapper_destroy destroy;
car_mapper_map prg_read;
car_mapper_map prg_write;
car_mapper_map chr_read;
car_mapper_map chr_write;
} Mapper;
#endif<file_sep>/Emulator.core/src/emulator/cpu/core/instructions/cpu_instructions.h
#ifndef __EMULATOR_CPU_CORE_INSTRUCTIONS_INSTRUCTIONS_H__
#define __EMULATOR_CPU_CORE_INSTRUCTIONS_INSTRUCTIONS_H__
#include "operations.h"
#include "addressing.h"
typedef uint8_t(*cpu_instruction_addressing)(CPU*);
typedef uint8_t(*cpu_instruction_operation)(CPU*);
static const struct instruction {
cpu_instruction_addressing addr_mode;
cpu_instruction_operation operation;
uint8_t cycles;
} lookup[256] = {
{ &cpu_instruction_addressing_imm, &cpu_instruction_operation_brk, 7 }, //00
{ &cpu_instruction_addressing_izx, &cpu_instruction_operation_ora, 6 }, //01
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_xxx, 2 }, //02
{ &cpu_instruction_addressing_izx, &cpu_instruction_operation_slo, 8 }, //03
{ &cpu_instruction_addressing_zp0, &cpu_instruction_operation_nop, 3 }, //04
{ &cpu_instruction_addressing_zp0, &cpu_instruction_operation_ora, 3 }, //05
{ &cpu_instruction_addressing_zp0, &cpu_instruction_operation_asl, 5 }, //06
{ &cpu_instruction_addressing_zp0, &cpu_instruction_operation_slo, 5 }, //07
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_php, 3 }, //08
{ &cpu_instruction_addressing_imm, &cpu_instruction_operation_ora, 2 }, //09
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_asl, 2 }, //0A
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_xxx, 2 }, //0B
{ &cpu_instruction_addressing_abs, &cpu_instruction_operation_nop, 4 }, //0C
{ &cpu_instruction_addressing_abs, &cpu_instruction_operation_ora, 4 }, //0D
{ &cpu_instruction_addressing_abs, &cpu_instruction_operation_asl, 6 }, //0E
{ &cpu_instruction_addressing_abs, &cpu_instruction_operation_slo, 6 }, //0F
{ &cpu_instruction_addressing_rel, &cpu_instruction_operation_bpl, 2 }, //10
{ &cpu_instruction_addressing_izy, &cpu_instruction_operation_ora, 5 }, //11
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_xxx, 2 }, //12
{ &cpu_instruction_addressing_izy, &cpu_instruction_operation_slo, 8 }, //13
{ &cpu_instruction_addressing_zpx, &cpu_instruction_operation_nop, 4 }, //14
{ &cpu_instruction_addressing_zpx, &cpu_instruction_operation_ora, 4 }, //15
{ &cpu_instruction_addressing_zpx, &cpu_instruction_operation_asl, 6 }, //16
{ &cpu_instruction_addressing_zpx, &cpu_instruction_operation_slo, 6 }, //17
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_clc, 2 }, //18
{ &cpu_instruction_addressing_aby, &cpu_instruction_operation_ora, 4 }, //19
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_nop, 2 }, //1A
{ &cpu_instruction_addressing_aby, &cpu_instruction_operation_slo, 7 }, //1B
{ &cpu_instruction_addressing_abs, &cpu_instruction_operation_nop, 4 }, //1C
{ &cpu_instruction_addressing_abx, &cpu_instruction_operation_ora, 4 }, //1D
{ &cpu_instruction_addressing_abx, &cpu_instruction_operation_asl, 7 }, //1E
{ &cpu_instruction_addressing_abx, &cpu_instruction_operation_slo, 7 }, //1F
{ &cpu_instruction_addressing_abs, &cpu_instruction_operation_jsr, 6 }, //20
{ &cpu_instruction_addressing_izx, &cpu_instruction_operation_and, 6 }, //21
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_xxx, 2 }, //22
{ &cpu_instruction_addressing_izx, &cpu_instruction_operation_rla, 8 }, //23
{ &cpu_instruction_addressing_zp0, &cpu_instruction_operation_bit, 3 }, //24
{ &cpu_instruction_addressing_zp0, &cpu_instruction_operation_and, 3 }, //25
{ &cpu_instruction_addressing_zp0, &cpu_instruction_operation_rol, 5 }, //26
{ &cpu_instruction_addressing_zp0, &cpu_instruction_operation_rla, 5 }, //27
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_plp, 4 }, //28
{ &cpu_instruction_addressing_imm, &cpu_instruction_operation_and, 2 }, //29
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_rol, 2 }, //2A
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_xxx, 2 }, //2B
{ &cpu_instruction_addressing_abs, &cpu_instruction_operation_bit, 4 }, //2C
{ &cpu_instruction_addressing_abs, &cpu_instruction_operation_and, 4 }, //2D
{ &cpu_instruction_addressing_abs, &cpu_instruction_operation_rol, 6 }, //2E
{ &cpu_instruction_addressing_abs, &cpu_instruction_operation_rla, 6 }, //2F
{ &cpu_instruction_addressing_rel, &cpu_instruction_operation_bmi, 2 }, //30
{ &cpu_instruction_addressing_izy, &cpu_instruction_operation_and, 5 }, //31
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_xxx, 2 }, //32
{ &cpu_instruction_addressing_izy, &cpu_instruction_operation_rla, 8 }, //33
{ &cpu_instruction_addressing_zpx, &cpu_instruction_operation_nop, 4 }, //34
{ &cpu_instruction_addressing_zpx, &cpu_instruction_operation_and, 4 }, //35
{ &cpu_instruction_addressing_zpx, &cpu_instruction_operation_rol, 6 }, //36
{ &cpu_instruction_addressing_zpx, &cpu_instruction_operation_rla, 6 }, //37
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_sec, 2 }, //38
{ &cpu_instruction_addressing_aby, &cpu_instruction_operation_and, 4 }, //39
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_nop, 2 }, //3A
{ &cpu_instruction_addressing_aby, &cpu_instruction_operation_rla, 7 }, //3B
{ &cpu_instruction_addressing_abs, &cpu_instruction_operation_nop, 4 }, //3C
{ &cpu_instruction_addressing_abx, &cpu_instruction_operation_and, 4 }, //3D
{ &cpu_instruction_addressing_abx, &cpu_instruction_operation_rol, 7 }, //3E
{ &cpu_instruction_addressing_abx, &cpu_instruction_operation_rla, 7 }, //3F
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_rti, 6 }, //40
{ &cpu_instruction_addressing_izx, &cpu_instruction_operation_eor, 6 }, //41
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_xxx, 2 }, //42
{ &cpu_instruction_addressing_izx, &cpu_instruction_operation_sre, 8 }, //43
{ &cpu_instruction_addressing_zp0, &cpu_instruction_operation_nop, 3 }, //44
{ &cpu_instruction_addressing_zp0, &cpu_instruction_operation_eor, 3 }, //45
{ &cpu_instruction_addressing_zp0, &cpu_instruction_operation_lsr, 5 }, //46
{ &cpu_instruction_addressing_zp0, &cpu_instruction_operation_sre, 5 }, //47
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_pha, 3 }, //48
{ &cpu_instruction_addressing_imm, &cpu_instruction_operation_eor, 2 }, //49
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_lsr, 2 }, //4A
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_xxx, 2 }, //4B
{ &cpu_instruction_addressing_abs, &cpu_instruction_operation_jmp, 3 }, //4C
{ &cpu_instruction_addressing_abs, &cpu_instruction_operation_eor, 4 }, //4D
{ &cpu_instruction_addressing_abs, &cpu_instruction_operation_lsr, 6 }, //4E
{ &cpu_instruction_addressing_abs, &cpu_instruction_operation_sre, 6 }, //4F
{ &cpu_instruction_addressing_rel, &cpu_instruction_operation_bvc, 2 }, //50
{ &cpu_instruction_addressing_izy, &cpu_instruction_operation_eor, 5 }, //51
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_xxx, 2 }, //52
{ &cpu_instruction_addressing_izy, &cpu_instruction_operation_sre, 8 }, //53
{ &cpu_instruction_addressing_zpx, &cpu_instruction_operation_nop, 4 }, //54
{ &cpu_instruction_addressing_zpx, &cpu_instruction_operation_eor, 4 }, //55
{ &cpu_instruction_addressing_zpx, &cpu_instruction_operation_lsr, 6 }, //56
{ &cpu_instruction_addressing_zpx, &cpu_instruction_operation_sre, 6 }, //57
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_cli, 2 }, //58
{ &cpu_instruction_addressing_aby, &cpu_instruction_operation_eor, 4 }, //59
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_nop, 2 }, //5A
{ &cpu_instruction_addressing_aby, &cpu_instruction_operation_sre, 7 }, //5B
{ &cpu_instruction_addressing_abs, &cpu_instruction_operation_nop, 4 }, //5C
{ &cpu_instruction_addressing_abx, &cpu_instruction_operation_eor, 4 }, //5D
{ &cpu_instruction_addressing_abx, &cpu_instruction_operation_lsr, 7 }, //5E
{ &cpu_instruction_addressing_abx, &cpu_instruction_operation_sre, 7 }, //5F
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_rts, 6 }, //60
{ &cpu_instruction_addressing_izx, &cpu_instruction_operation_adc, 6 }, //61
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_xxx, 2 }, //62
{ &cpu_instruction_addressing_izx, &cpu_instruction_operation_rra, 8 }, //63
{ &cpu_instruction_addressing_zp0, &cpu_instruction_operation_nop, 3 }, //64
{ &cpu_instruction_addressing_zp0, &cpu_instruction_operation_adc, 3 }, //65
{ &cpu_instruction_addressing_zp0, &cpu_instruction_operation_ror, 5 }, //66
{ &cpu_instruction_addressing_zp0, &cpu_instruction_operation_rra, 5 }, //67
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_pla, 4 }, //68
{ &cpu_instruction_addressing_imm, &cpu_instruction_operation_adc, 2 }, //69
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_ror, 2 }, //6A
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_xxx, 2 }, //6B
{ &cpu_instruction_addressing_ind, &cpu_instruction_operation_jmp, 5 }, //6C
{ &cpu_instruction_addressing_abs, &cpu_instruction_operation_adc, 4 }, //6D
{ &cpu_instruction_addressing_abs, &cpu_instruction_operation_ror, 6 }, //6E
{ &cpu_instruction_addressing_abs, &cpu_instruction_operation_rra, 6 }, //6F
{ &cpu_instruction_addressing_rel, &cpu_instruction_operation_bvs, 2 }, //70
{ &cpu_instruction_addressing_izy, &cpu_instruction_operation_adc, 5 }, //71
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_xxx, 2 }, //72
{ &cpu_instruction_addressing_izy, &cpu_instruction_operation_rra, 8 }, //73
{ &cpu_instruction_addressing_zpx, &cpu_instruction_operation_nop, 4 }, //74
{ &cpu_instruction_addressing_zpx, &cpu_instruction_operation_adc, 4 }, //75
{ &cpu_instruction_addressing_zpx, &cpu_instruction_operation_ror, 6 }, //76
{ &cpu_instruction_addressing_zpx, &cpu_instruction_operation_rra, 6 }, //77
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_sei, 2 }, //78
{ &cpu_instruction_addressing_aby, &cpu_instruction_operation_adc, 4 }, //79
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_nop, 2 }, //7A
{ &cpu_instruction_addressing_aby, &cpu_instruction_operation_rra, 7 }, //7B
{ &cpu_instruction_addressing_abs, &cpu_instruction_operation_nop, 4 }, //7C
{ &cpu_instruction_addressing_abx, &cpu_instruction_operation_adc, 4 }, //7D
{ &cpu_instruction_addressing_abx, &cpu_instruction_operation_ror, 7 }, //7E
{ &cpu_instruction_addressing_abx, &cpu_instruction_operation_rra, 7 }, //7F
{ &cpu_instruction_addressing_rel, &cpu_instruction_operation_nop, 2 }, //80
{ &cpu_instruction_addressing_izx, &cpu_instruction_operation_sta, 6 }, //81
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_nop, 2 }, //82
{ &cpu_instruction_addressing_izx, &cpu_instruction_operation_sax, 6 }, //83
{ &cpu_instruction_addressing_zp0, &cpu_instruction_operation_sty, 3 }, //84
{ &cpu_instruction_addressing_zp0, &cpu_instruction_operation_sta, 3 }, //85
{ &cpu_instruction_addressing_zp0, &cpu_instruction_operation_stx, 3 }, //86
{ &cpu_instruction_addressing_zp0, &cpu_instruction_operation_sax, 3 }, //87
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_dey, 2 }, //88
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_nop, 2 }, //89
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_txa, 2 }, //8A
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_xxx, 2 }, //8B
{ &cpu_instruction_addressing_abs, &cpu_instruction_operation_sty, 4 }, //8C
{ &cpu_instruction_addressing_abs, &cpu_instruction_operation_sta, 4 }, //8D
{ &cpu_instruction_addressing_abs, &cpu_instruction_operation_stx, 4 }, //8E
{ &cpu_instruction_addressing_abs, &cpu_instruction_operation_sax, 4 }, //8F
{ &cpu_instruction_addressing_rel, &cpu_instruction_operation_bcc, 2 }, //90
{ &cpu_instruction_addressing_izy, &cpu_instruction_operation_sta, 6 }, //91
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_xxx, 2 }, //92
{ &cpu_instruction_addressing_zpx, &cpu_instruction_operation_xxx, 6 }, //93
{ &cpu_instruction_addressing_zpx, &cpu_instruction_operation_sty, 4 }, //94
{ &cpu_instruction_addressing_zpx, &cpu_instruction_operation_sta, 4 }, //95
{ &cpu_instruction_addressing_zpy, &cpu_instruction_operation_stx, 4 }, //96
{ &cpu_instruction_addressing_zpy, &cpu_instruction_operation_sax, 4 }, //97
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_tya, 2 }, //98
{ &cpu_instruction_addressing_aby, &cpu_instruction_operation_sta, 5 }, //99
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_txs, 2 }, //9A
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_xxx, 5 }, //9B
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_nop, 5 }, //9C
{ &cpu_instruction_addressing_abx, &cpu_instruction_operation_sta, 5 }, //9D
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_xxx, 5 }, //9E
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_xxx, 5 }, //9F
{ &cpu_instruction_addressing_imm, &cpu_instruction_operation_ldy, 2 }, //A0
{ &cpu_instruction_addressing_izx, &cpu_instruction_operation_lda, 6 }, //A1
{ &cpu_instruction_addressing_imm, &cpu_instruction_operation_ldx, 2 }, //A2
{ &cpu_instruction_addressing_izx, &cpu_instruction_operation_lax, 6 }, //A3
{ &cpu_instruction_addressing_zp0, &cpu_instruction_operation_ldy, 3 }, //A4
{ &cpu_instruction_addressing_zp0, &cpu_instruction_operation_lda, 3 }, //A5
{ &cpu_instruction_addressing_zp0, &cpu_instruction_operation_ldx, 3 }, //A6
{ &cpu_instruction_addressing_zp0, &cpu_instruction_operation_lax, 3 }, //A7
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_tay, 2 }, //A8
{ &cpu_instruction_addressing_imm, &cpu_instruction_operation_lda, 2 }, //A9
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_tax, 2 }, //AA
{ &cpu_instruction_addressing_imm, &cpu_instruction_operation_lax, 2 }, //AB
{ &cpu_instruction_addressing_abs, &cpu_instruction_operation_ldy, 4 }, //AC
{ &cpu_instruction_addressing_abs, &cpu_instruction_operation_lda, 4 }, //AD
{ &cpu_instruction_addressing_abs, &cpu_instruction_operation_ldx, 4 }, //AE
{ &cpu_instruction_addressing_abs, &cpu_instruction_operation_lax, 4 }, //AF
{ &cpu_instruction_addressing_rel, &cpu_instruction_operation_bcs, 2 }, //B0
{ &cpu_instruction_addressing_izy, &cpu_instruction_operation_lda, 5 }, //B1
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_xxx, 2 }, //B2
{ &cpu_instruction_addressing_izy, &cpu_instruction_operation_lax, 5 }, //B3
{ &cpu_instruction_addressing_zpx, &cpu_instruction_operation_ldy, 4 }, //B4
{ &cpu_instruction_addressing_zpx, &cpu_instruction_operation_lda, 4 }, //B5
{ &cpu_instruction_addressing_zpy, &cpu_instruction_operation_ldx, 4 }, //B6
{ &cpu_instruction_addressing_zpy, &cpu_instruction_operation_lax, 4 }, //B7
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_clv, 2 }, //B8
{ &cpu_instruction_addressing_aby, &cpu_instruction_operation_lda, 4 }, //B9
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_tsx, 2 }, //BA
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_xxx, 4 }, //BB
{ &cpu_instruction_addressing_abx, &cpu_instruction_operation_ldy, 4 }, //BC
{ &cpu_instruction_addressing_abx, &cpu_instruction_operation_lda, 4 }, //BD
{ &cpu_instruction_addressing_aby, &cpu_instruction_operation_ldx, 4 }, //BE
{ &cpu_instruction_addressing_aby, &cpu_instruction_operation_lax, 4 }, //BF
{ &cpu_instruction_addressing_imm, &cpu_instruction_operation_cpy, 2 }, //C0
{ &cpu_instruction_addressing_izx, &cpu_instruction_operation_cmp, 6 }, //C1
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_nop, 2 }, //C2
{ &cpu_instruction_addressing_izx, &cpu_instruction_operation_dcp, 8 }, //C3
{ &cpu_instruction_addressing_zp0, &cpu_instruction_operation_cpy, 3 }, //C4
{ &cpu_instruction_addressing_zp0, &cpu_instruction_operation_cmp, 3 }, //C5
{ &cpu_instruction_addressing_zp0, &cpu_instruction_operation_dec, 5 }, //C6
{ &cpu_instruction_addressing_zp0, &cpu_instruction_operation_dcp, 5 }, //C7
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_iny, 2 }, //C8
{ &cpu_instruction_addressing_imm, &cpu_instruction_operation_cmp, 2 }, //C9
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_dex, 2 }, //CA
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_xxx, 2 }, //CB
{ &cpu_instruction_addressing_abs, &cpu_instruction_operation_cpy, 4 }, //CC
{ &cpu_instruction_addressing_abs, &cpu_instruction_operation_cmp, 4 }, //CD
{ &cpu_instruction_addressing_abs, &cpu_instruction_operation_dec, 6 }, //CE
{ &cpu_instruction_addressing_abs, &cpu_instruction_operation_dcp, 6 }, //CF
{ &cpu_instruction_addressing_rel, &cpu_instruction_operation_bne, 2 }, //D0
{ &cpu_instruction_addressing_izy, &cpu_instruction_operation_cmp, 5 }, //D1
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_xxx, 2 }, //D2
{ &cpu_instruction_addressing_izy, &cpu_instruction_operation_dcp, 8 }, //D3
{ &cpu_instruction_addressing_zpx, &cpu_instruction_operation_nop, 4 }, //D4
{ &cpu_instruction_addressing_zpx, &cpu_instruction_operation_cmp, 4 }, //D5
{ &cpu_instruction_addressing_zpx, &cpu_instruction_operation_dec, 6 }, //D6
{ &cpu_instruction_addressing_zpx, &cpu_instruction_operation_dcp, 6 }, //D7
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_cld, 2 }, //D8
{ &cpu_instruction_addressing_aby, &cpu_instruction_operation_cmp, 4 }, //D9
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_nop, 2 }, //DA
{ &cpu_instruction_addressing_aby, &cpu_instruction_operation_dcp, 7 }, //DB
{ &cpu_instruction_addressing_abs, &cpu_instruction_operation_nop, 4 }, //DC
{ &cpu_instruction_addressing_abx, &cpu_instruction_operation_cmp, 4 }, //DD
{ &cpu_instruction_addressing_abx, &cpu_instruction_operation_dec, 7 }, //DE
{ &cpu_instruction_addressing_abx, &cpu_instruction_operation_dcp, 7 }, //DF
{ &cpu_instruction_addressing_imm, &cpu_instruction_operation_cpx, 2 }, //E0
{ &cpu_instruction_addressing_izx, &cpu_instruction_operation_sbc, 6 }, //E1
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_nop, 2 }, //E2
{ &cpu_instruction_addressing_izx, &cpu_instruction_operation_isc, 8 }, //E3
{ &cpu_instruction_addressing_zp0, &cpu_instruction_operation_cpx, 3 }, //E4
{ &cpu_instruction_addressing_zp0, &cpu_instruction_operation_sbc, 3 }, //E5
{ &cpu_instruction_addressing_zp0, &cpu_instruction_operation_inc, 5 }, //E6
{ &cpu_instruction_addressing_zp0, &cpu_instruction_operation_isc, 5 }, //E7
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_inx, 2 }, //E8
{ &cpu_instruction_addressing_imm, &cpu_instruction_operation_sbc, 2 }, //E9
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_nop, 2 }, //EA
{ &cpu_instruction_addressing_imm, &cpu_instruction_operation_sbc, 2 }, //EB
{ &cpu_instruction_addressing_abs, &cpu_instruction_operation_cpx, 4 }, //EC
{ &cpu_instruction_addressing_abs, &cpu_instruction_operation_sbc, 4 }, //ED
{ &cpu_instruction_addressing_abs, &cpu_instruction_operation_inc, 6 }, //EE
{ &cpu_instruction_addressing_abs, &cpu_instruction_operation_isc, 6 }, //EF
{ &cpu_instruction_addressing_rel, &cpu_instruction_operation_beq, 2 }, //F0
{ &cpu_instruction_addressing_izy, &cpu_instruction_operation_sbc, 5 }, //F1
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_xxx, 2 }, //F2
{ &cpu_instruction_addressing_izy, &cpu_instruction_operation_isc, 8 }, //F3
{ &cpu_instruction_addressing_zpx, &cpu_instruction_operation_nop, 4 }, //F4
{ &cpu_instruction_addressing_zpx, &cpu_instruction_operation_sbc, 4 }, //F5
{ &cpu_instruction_addressing_zpx, &cpu_instruction_operation_inc, 6 }, //F6
{ &cpu_instruction_addressing_zpx, &cpu_instruction_operation_isc, 6 }, //F7
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_sed, 2 }, //F8
{ &cpu_instruction_addressing_aby, &cpu_instruction_operation_sbc, 4 }, //F9
{ &cpu_instruction_addressing_imp, &cpu_instruction_operation_nop, 2 }, //FA
{ &cpu_instruction_addressing_aby, &cpu_instruction_operation_isc, 7 }, //FB
{ &cpu_instruction_addressing_abs, &cpu_instruction_operation_nop, 4 }, //FC
{ &cpu_instruction_addressing_abx, &cpu_instruction_operation_sbc, 4 }, //FD
{ &cpu_instruction_addressing_abx, &cpu_instruction_operation_inc, 7 }, //FE
{ &cpu_instruction_addressing_abx, &cpu_instruction_operation_isc, 7 } //FF
};
#endif<file_sep>/Emulator.core/src/emulator/ppu/bus/TblName.c
#include "TblName.h"
#include <stdlib.h>
#include "../../car/core/Cartridge.h"
Tbl_Name* tbl_name_init()
{
Tbl_Name* tbl_name = (Tbl_Name*)malloc(sizeof(Tbl_Name));
if (tbl_name == NULL)
return NULL;
tbl_name->table_1 = (uint8_t*)malloc(sizeof(uint8_t) * 1024);
if (tbl_name->table_1 == NULL)
{
free(tbl_name);
return NULL;
}
tbl_name->table_2 = (uint8_t*)malloc(sizeof(uint8_t) * 1024);
if (tbl_name->table_2 == NULL)
{
free(tbl_name->table_1);
free(tbl_name);
return NULL;
}
return tbl_name;
}
void tbl_name_destroy(Tbl_Name* tbl_name)
{
if (tbl_name == NULL)
return;
free(tbl_name->table_1);
free(tbl_name->table_2);
free(tbl_name);
}
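// Reads a byte of name table RAM, folding the 4 KB PPU address range onto the two physical 1 KB tables according to the cartridge's mirroring mode.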
uint8_t tbl_name_read(Tbl_Name* tbl_name, uint16_t addr, uint8_t mirror)
{
addr &= 0x0FFF;
if (mirror == CARTRIDGE_MIRROR_VERTICAL)
{
if (addr >= 0x0000 && addr <= 0x03FF)
return tbl_name->table_1[addr & 0x03FF];
if (addr >= 0x0400 && addr <= 0x07FF)
return tbl_name->table_2[addr & 0x03FF];
if (addr >= 0x0800 && addr <= 0x0BFF)
return tbl_name->table_1[addr & 0x03FF];
if (addr >= 0x0C00 && addr <= 0x0FFF)
return tbl_name->table_2[addr & 0x03FF];
}
else if (mirror == CARTRIDGE_MIRROR_HORIZONTAL)
{
if (addr >= 0x0000 && addr <= 0x03FF)
return tbl_name->table_1[addr & 0x03FF];
if (addr >= 0x0400 && addr <= 0x07FF)
return tbl_name->table_1[addr & 0x03FF];
if (addr >= 0x0800 && addr <= 0x0BFF)
return tbl_name->table_2[addr & 0x03FF];
if (addr >= 0x0C00 && addr <= 0x0FFF)
return tbl_name->table_2[addr & 0x03FF];
}
	// Unknown mirroring mode: nothing mapped, so default to 0.
	return 0x00;
}
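// Writes a byte of name table RAM using the same mirroring rules as tbl_name_read.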
void tbl_name_write(Tbl_Name* tbl_name, uint16_t addr, uint8_t value, uint8_t mirror)
{
addr &= 0x0FFF;
if (mirror == CARTRIDGE_MIRROR_VERTICAL)
{
if (addr >= 0x0000 && addr <= 0x03FF)
tbl_name->table_1[addr & 0x03FF] = value;
if (addr >= 0x0400 && addr <= 0x07FF)
tbl_name->table_2[addr & 0x03FF] = value;
if (addr >= 0x0800 && addr <= 0x0BFF)
tbl_name->table_1[addr & 0x03FF] = value;
if (addr >= 0x0C00 && addr <= 0x0FFF)
tbl_name->table_2[addr & 0x03FF] = value;
}
else if (mirror == CARTRIDGE_MIRROR_HORIZONTAL)
{
if (addr >= 0x0000 && addr <= 0x03FF)
tbl_name->table_1[addr & 0x03FF] = value;
if (addr >= 0x0400 && addr <= 0x07FF)
tbl_name->table_1[addr & 0x03FF] = value;
if (addr >= 0x0800 && addr <= 0x0BFF)
tbl_name->table_2[addr & 0x03FF] = value;
if (addr >= 0x0C00 && addr <= 0x0FFF)
tbl_name->table_2[addr & 0x03FF] = value;
}
}
<file_sep>/Emulator.core/src/emulator/cpu/core/Registery.c
#include "Registery.h"
#include <stdlib.h>
#include <string.h>
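// Allocates a zeroed register file (A, X, Y, status, stack pointer and program counter).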
Registery* registery_init()
{
Registery* registery = (Registery*)malloc(sizeof(Registery));
if (registery == NULL)
return NULL;
memset(registery, 0x00, sizeof(Registery));
return registery;
}
void registery_destroy(Registery* registery)
{
if (registery == NULL)
return;
free(registery);
}
void status_set(Registery* registery, flag flag, bool value)
{
flag_write(&(registery->s), flag, value);
}
bool status_get(const Registery* registery, flag flag)
{
return flag_read(registery->s, flag);
}<file_sep>/Emulator.core/src/emulator/cpu/core/Flags.h
#ifndef __EMULATOR_CPU_CORE_FLAGS__
#define __EMULATOR_CPU_CORE_FLAGS__
#include <inttypes.h>
#include <utils/boolean.h>
typedef uint8_t flag;
typedef uint8_t flags;
#define FLAG_C 0x01 // Carry Bit
#define FLAG_Z 0x02 // Zero
#define FLAG_I 0x04 // Disable Interrupts
#define FLAG_D 0x08 // Decimal Mode (unused in this implementation)
#define FLAG_B 0x10 // Break
#define FLAG_U 0x20 // Unused
#define FLAG_V 0x40 // Overflow
#define FLAG_N 0x80 // Negative
bool flag_read(flags flags, flag flag);
void flag_write(flags* flags, flag flag, bool value);
void flag_set_active(flags* flags, flag flag);
void flag_set_inactive(flags* flags, flag flag);
#endif<file_sep>/Emulator.core/src/emulator/ppu/bus/TblName.h
#ifndef __EMULATOR_PPU_BUS_TBL_NAME_H__
#define __EMULATOR_PPU_BUS_TBL_NAME_H__
#include <inttypes.h>
typedef struct tbl_name
{
uint8_t * table_1;
uint8_t * table_2;
}Tbl_Name;
Tbl_Name * tbl_name_init();
void tbl_name_destroy(Tbl_Name* tbl_name);
uint8_t tbl_name_read(Tbl_Name* tbl_name, uint16_t addr, uint8_t mirror);
void tbl_name_write(Tbl_Name* tbl_name, uint16_t addr, uint8_t value, uint8_t mirror);
#endif<file_sep>/Emulator.core/src/emulator/cpu/core/instructions/operations.c
#include "operations.h"
#include "addressing.h"
#include "cpu_instructions.h"
#include "../settings.h"
// helper function
// fetches the value from the bus into the alu
void fetch(CPU* cpu)
{
if (!(lookup[cpu->opt_code].addr_mode == &cpu_instruction_addressing_imp))
cpu->alu = cpu_bus_read(cpu, cpu->addr_abs);
}
// Instruction: Add with Carry In
// Function: A = A + M + C
// Flags Out: C, V, N, Z
uint8_t cpu_instruction_operation_adc(CPU* cpu)
{
fetch(cpu);
cpu->temp = (uint16_t)cpu->registery->a + (uint16_t)cpu->alu;
if (is_true(cpu_status_get(cpu, FLAG_C)))
cpu->temp += 1;
cpu_status_set(cpu, FLAG_C, cpu->temp > 255);
cpu_status_set(cpu, FLAG_Z, (cpu->temp & 0x00ff) == 0);
cpu_status_set(cpu, FLAG_V, ((~((uint16_t)cpu->registery->a ^ (uint16_t)cpu->alu) & ((uint16_t)cpu->registery->a ^ (uint16_t)cpu->temp)) & 0x0080) > 0);
cpu_status_set(cpu, FLAG_N, (cpu->temp & 0x80) > 0);
cpu->registery->a = cpu->temp & 0x00ff;
return 1;
}
// Instruction: Subtraction with Borrow In
// Function: A = A - M - (1 - C)
// Flags Out: C, V, N, Z
uint8_t cpu_instruction_operation_sbc(CPU* cpu)
{
fetch(cpu);
uint16_t value = ((uint16_t)cpu->alu) ^ 0x00ff;
cpu->temp = (uint16_t)cpu->registery->a + (uint16_t)value;
if (is_true(cpu_status_get(cpu, FLAG_C)))
cpu->temp += 1;
cpu_status_set(cpu, FLAG_C, (cpu->temp & 0xff00) > 0);
cpu_status_set(cpu, FLAG_Z, (cpu->temp & 0x00ff) == 0);
cpu_status_set(cpu, FLAG_V, ((cpu->temp ^ (uint16_t)cpu->registery->a) & (cpu->temp ^ value) & 0x0080) > 0);
cpu_status_set(cpu, FLAG_N, (cpu->temp & 0x80) > 0);
cpu->registery->a = cpu->temp & 0x00ff;
return 1;
}
// Instruction: Bitwise Logic AND
// Function: A = A & M
// Flags Out: N, Z
uint8_t cpu_instruction_operation_and(CPU* cpu)
{
fetch(cpu);
cpu->registery->a &= cpu->alu;
cpu_status_set(cpu, FLAG_Z, cpu->registery->a == 0x00);
cpu_status_set(cpu, FLAG_N, (cpu->registery->a & 0x80) > 0);
return 1;
}
// Instruction: Arithmetic Shift Left
// Function: A = C <- (A << 1) <- 0
// Flags Out: N, Z, C
uint8_t cpu_instruction_operation_asl(CPU* cpu)
{
fetch(cpu);
cpu->temp = ((uint16_t)cpu->alu) << 1;
cpu_status_set(cpu, FLAG_C, (cpu->temp & 0xff00) > 0);
cpu_status_set(cpu, FLAG_Z, (cpu->temp & 0x00ff) == 0x0000);
cpu_status_set(cpu, FLAG_N, (cpu->temp & 0x0080) > 0);
if (lookup[cpu->opt_code].addr_mode == &cpu_instruction_addressing_imp)
cpu->registery->a = cpu->temp & 0x00ff;
else
cpu_bus_write(cpu, cpu->addr_abs, cpu->temp & 0x00ff);
return 0;
}
// Instruction: Branch if Carry Clear
// Function: if(C == 0) pc = address
uint8_t cpu_instruction_operation_bcc(CPU* cpu)
{
if (is_true(cpu_status_get(cpu, FLAG_C)))
return 0;
cpu->cycles++;
cpu->addr_abs = cpu->registery->pc + cpu->addr_rel;
if ((cpu->addr_abs & 0xff00) != (cpu->registery->pc & 0xff00))
cpu->cycles++;
cpu->registery->pc = cpu->addr_abs;
return 0;
}
// Instruction: Branch if Carry Set
// Function: if(C == 1) pc = address
uint8_t cpu_instruction_operation_bcs(CPU* cpu)
{
if (is_false(cpu_status_get(cpu, FLAG_C)))
return 0;
cpu->cycles++;
cpu->addr_abs = cpu->registery->pc + cpu->addr_rel;
if ((cpu->addr_abs & 0xff00) != (cpu->registery->pc & 0xff00))
cpu->cycles++;
cpu->registery->pc = cpu->addr_abs;
return 0;
}
// Instruction: Branch if Equal
// Function: if(Z == 1) pc = address
uint8_t cpu_instruction_operation_beq(CPU* cpu)
{
if (is_false(cpu_status_get(cpu, FLAG_Z)))
return 0;
cpu->cycles++;
cpu->addr_abs = cpu->registery->pc + cpu->addr_rel;
if ((cpu->addr_abs & 0xff00) != (cpu->registery->pc & 0xff00))
cpu->cycles++;
cpu->registery->pc = cpu->addr_abs;
return 0;
}
// Instruction: Test Bits in Memory with Accumulator
// Function: A AND M, M7 -> N, M6 -> V
uint8_t cpu_instruction_operation_bit(CPU* cpu)
{
fetch(cpu);
cpu->temp = cpu->registery->a & cpu->alu;
cpu_status_set(cpu, FLAG_Z, (cpu->temp & 0x00ff) == 0x0000);
cpu_status_set(cpu, FLAG_N, (cpu->alu & (1 << 7)) > 0);
cpu_status_set(cpu, FLAG_V, (cpu->alu & (1 << 6)) > 0);
return 0;
}
// Instruction: Branch if Negative
// Function: if(N == 1) pc = address
uint8_t cpu_instruction_operation_bmi(CPU* cpu)
{
if (is_false(cpu_status_get(cpu, FLAG_N)))
return 0;
cpu->cycles++;
cpu->addr_abs = cpu->registery->pc + cpu->addr_rel;
if ((cpu->addr_abs & 0xff00) != (cpu->registery->pc & 0xff00))
cpu->cycles++;
cpu->registery->pc = cpu->addr_abs;
return 0;
}
// Instruction: Branch if Not Equal
// Function: if(Z == 0) pc = address
uint8_t cpu_instruction_operation_bne(CPU* cpu)
{
if (is_true(cpu_status_get(cpu, FLAG_Z)))
return 0;
cpu->cycles++;
cpu->addr_abs = cpu->registery->pc + cpu->addr_rel;
if ((cpu->addr_abs & 0xff00) != (cpu->registery->pc & 0xff00))
cpu->cycles++;
cpu->registery->pc = cpu->addr_abs;
return 0;
}
// Instruction: Branch if Positive
// Function: if(N == 0) pc = address
uint8_t cpu_instruction_operation_bpl(CPU* cpu)
{
if (is_true(cpu_status_get(cpu, FLAG_N)))
return 0;
cpu->cycles++;
cpu->addr_abs = cpu->registery->pc + cpu->addr_rel;
if ((cpu->addr_abs & 0xff00) != (cpu->registery->pc & 0xff00))
cpu->cycles++;
cpu->registery->pc = cpu->addr_abs;
return 0;
}
// Instruction: Break
// Function: Program Sourced Interrupt
uint8_t cpu_instruction_operation_brk(CPU* cpu)
{
cpu->registery->pc++;
cpu_status_set(cpu, FLAG_I, TRUE);
cpu_stack_push(cpu, (cpu->registery->pc >> 8) & 0x00ff);
cpu_stack_push(cpu, cpu->registery->pc & 0x00ff);
cpu_status_set(cpu, FLAG_B, TRUE);
cpu_stack_push(cpu, cpu->registery->s);
cpu_status_set(cpu, FLAG_B, FALSE);
uint16_t lo = cpu_bus_read(cpu, IRQ_ADDRESS);
uint16_t hi = cpu_bus_read(cpu, IRQ_ADDRESS + 1);
cpu->registery->pc = (hi << 8) | lo;
return 0;
}
// Instruction: Branch if Overflow Clear
// Function: if(V == 0) pc = address
uint8_t cpu_instruction_operation_bvc(CPU* cpu)
{
if (is_true(cpu_status_get(cpu, FLAG_V)))
return 0;
cpu->cycles++;
cpu->addr_abs = cpu->registery->pc + cpu->addr_rel;
if ((cpu->addr_abs & 0xff00) != (cpu->registery->pc & 0xff00))
cpu->cycles++;
cpu->registery->pc = cpu->addr_abs;
return 0;
}
// Instruction: Branch if Overflow Set
// Function: if(V == 1) pc = address
uint8_t cpu_instruction_operation_bvs(CPU* cpu)
{
if (is_false(cpu_status_get(cpu, FLAG_V)))
return 0;
cpu->cycles++;
cpu->addr_abs = cpu->registery->pc + cpu->addr_rel;
if ((cpu->addr_abs & 0xff00) != (cpu->registery->pc & 0xff00))
cpu->cycles++;
cpu->registery->pc = cpu->addr_abs;
return 0;
}
// Instruction: Clear Carry Flag
// Function: C = 0
uint8_t cpu_instruction_operation_clc(CPU* cpu)
{
cpu_status_set(cpu, FLAG_C, FALSE);
return 0;
}
// Instruction: Clear Decimal Flag
// Function: D = 0
uint8_t cpu_instruction_operation_cld(CPU* cpu)
{
cpu_status_set(cpu, FLAG_D, FALSE);
return 0;
}
// Instruction: Clear Interrupt Disable Flag (enables IRQs)
// Function: I = 0
uint8_t cpu_instruction_operation_cli(CPU* cpu)
{
cpu_status_set(cpu, FLAG_I, FALSE);
return 0;
}
// Instruction: Clear Overflow Flag
// Function: V = 0
uint8_t cpu_instruction_operation_clv(CPU* cpu)
{
cpu_status_set(cpu, FLAG_V, FALSE);
return 0;
}
// Instruction: Compare Accumulator
// Function: C <- A >= M Z <- (A - M) == 0
// Flags Out: N, C, Z
uint8_t cpu_instruction_operation_cmp(CPU* cpu)
{
fetch(cpu);
cpu->temp = cpu->registery->a;
cpu->temp -= cpu->alu;
cpu_status_set(cpu, FLAG_C, cpu->registery->a >= cpu->alu);
cpu_status_set(cpu, FLAG_Z, (cpu->temp & 0x00ff) == 0x0000);
cpu_status_set(cpu, FLAG_N, (cpu->temp & 0x0080) > 0);
return 1;
}
// Instruction: Compare X Register
// Function: C <- X >= M Z <- (X - M) == 0
// Flags Out: N, C, Z
uint8_t cpu_instruction_operation_cpx(CPU* cpu)
{
fetch(cpu);
cpu->temp = cpu->registery->x;
cpu->temp -= cpu->alu;
cpu_status_set(cpu, FLAG_C, cpu->registery->x >= cpu->alu);
cpu_status_set(cpu, FLAG_Z, (cpu->temp & 0x00ff) == 0x0000);
cpu_status_set(cpu, FLAG_N, (cpu->temp & 0x0080) > 0);
return 0;
}
// Instruction: Compare Y Register
// Function: C <- Y >= M Z <- (Y - M) == 0
// Flags Out: N, C, Z
uint8_t cpu_instruction_operation_cpy(CPU* cpu)
{
fetch(cpu);
cpu->temp = cpu->registery->y;
cpu->temp -= cpu->alu;
cpu_status_set(cpu, FLAG_C, cpu->registery->y >= cpu->alu);
cpu_status_set(cpu, FLAG_Z, (cpu->temp & 0x00ff) == 0x0000);
cpu_status_set(cpu, FLAG_N, (cpu->temp & 0x0080) > 0);
return 0;
}
// Instruction: Decrement Value at Memory Location
// Function: M = M - 1
// Flags Out: N, Z
uint8_t cpu_instruction_operation_dec(CPU* cpu)
{
fetch(cpu);
cpu->temp = cpu->alu - 1;
cpu_bus_write(cpu, cpu->addr_abs, cpu->temp & 0x00ff);
cpu_status_set(cpu, FLAG_Z, (cpu->temp & 0x00ff) == 0x0000);
cpu_status_set(cpu, FLAG_N, (cpu->temp & 0x0080) > 0);
return 0;
}
// Instruction: Decrement X Register
// Function: X = X - 1
// Flags Out: N, Z
uint8_t cpu_instruction_operation_dex(CPU* cpu)
{
cpu->registery->x--;
cpu_status_set(cpu, FLAG_Z, cpu->registery->x == 0x00);
cpu_status_set(cpu, FLAG_N, (cpu->registery->x & 0x80) > 0);
return 0;
}
// Instruction: Decrement Y Register
// Function: Y = Y - 1
// Flags Out: N, Z
uint8_t cpu_instruction_operation_dey(CPU* cpu)
{
cpu->registery->y--;
cpu_status_set(cpu, FLAG_Z, cpu->registery->y == 0x00);
cpu_status_set(cpu, FLAG_N, (cpu->registery->y & 0x80) > 0);
return 0;
}
// Instruction: Bitwise Logic XOR
// Function: A = A xor M
// Flags Out: N, Z
uint8_t cpu_instruction_operation_eor(CPU* cpu)
{
fetch(cpu);
cpu->registery->a ^= cpu->alu;
cpu_status_set(cpu, FLAG_Z, cpu->registery->a == 0x00);
cpu_status_set(cpu, FLAG_N, (cpu->registery->a & 0x80) > 0);
return 1;
}
// Instruction: Increment Value at Memory Location
// Function: M = M + 1
// Flags Out: N, Z
uint8_t cpu_instruction_operation_inc(CPU* cpu)
{
fetch(cpu);
cpu->temp = cpu->alu + 1;
cpu_bus_write(cpu, cpu->addr_abs, cpu->temp & 0x00ff);
cpu_status_set(cpu, FLAG_Z, (cpu->temp & 0x00ff) == 0x0000);
cpu_status_set(cpu, FLAG_N, (cpu->temp & 0x0080) > 0);
return 0;
}
// Instruction: Increment X Register
// Function: X = X + 1
// Flags Out: N, Z
uint8_t cpu_instruction_operation_inx(CPU* cpu)
{
cpu->registery->x++;
cpu_status_set(cpu, FLAG_Z, cpu->registery->x == 0x00);
cpu_status_set(cpu, FLAG_N, (cpu->registery->x & 0x80) > 0);
return 0;
}
// Instruction: Increment Y Register
// Function: Y = Y + 1
// Flags Out: N, Z
uint8_t cpu_instruction_operation_iny(CPU* cpu)
{
cpu->registery->y++;
cpu_status_set(cpu, FLAG_Z, cpu->registery->y == 0x00);
cpu_status_set(cpu, FLAG_N, (cpu->registery->y & 0x80) > 0);
return 0;
}
// Instruction: Jump To Location
// Function: pc = address
uint8_t cpu_instruction_operation_jmp(CPU* cpu)
{
cpu->registery->pc = cpu->addr_abs;
return 0;
}
// Instruction: Jump To Sub-Routine
// Function: Push current pc to stack, pc = address
uint8_t cpu_instruction_operation_jsr(CPU* cpu)
{
cpu->registery->pc--;
cpu_stack_push(cpu, (cpu->registery->pc >> 8) & 0x00ff);
cpu_stack_push(cpu, cpu->registery->pc & 0x00ff);
cpu->registery->pc = cpu->addr_abs;
return 0;
}
// Instruction: Load The Accumulator
// Function: A = M
// Flags Out: N, Z
uint8_t cpu_instruction_operation_lda(CPU* cpu)
{
fetch(cpu);
cpu->registery->a = cpu->alu;
cpu_status_set(cpu, FLAG_Z, cpu->registery->a == 0x00);
cpu_status_set(cpu, FLAG_N, (cpu->registery->a & 0x80) > 0);
return 1;
}
// Instruction: Load The X Register
// Function: X = M
// Flags Out: N, Z
uint8_t cpu_instruction_operation_ldx(CPU* cpu)
{
fetch(cpu);
cpu->registery->x = cpu->alu;
cpu_status_set(cpu, FLAG_Z, cpu->registery->x == 0x00);
cpu_status_set(cpu, FLAG_N, (cpu->registery->x & 0x80) > 0);
return 1;
}
// Instruction: Load The Y Register
// Function: Y = M
// Flags Out: N, Z
uint8_t cpu_instruction_operation_ldy(CPU* cpu)
{
fetch(cpu);
cpu->registery->y = cpu->alu;
cpu_status_set(cpu, FLAG_Z, cpu->registery->y == 0x00);
cpu_status_set(cpu, FLAG_N, (cpu->registery->y & 0x80) > 0);
return 1;
}
// Instruction: Shift One Bit Right (Memory or Accumulator)
// Function: 0 -> [76543210] -> C
uint8_t cpu_instruction_operation_lsr(CPU* cpu)
{
fetch(cpu);
cpu_status_set(cpu, FLAG_C, (cpu->alu & 0x0001) > 0);
cpu->temp = cpu->alu >> 1;
cpu_status_set(cpu, FLAG_Z, (cpu->temp & 0x00ff) == 0x0000);
cpu_status_set(cpu, FLAG_N, (cpu->temp & 0x0080) > 0);
if (lookup[cpu->opt_code].addr_mode == &cpu_instruction_addressing_imp)
cpu->registery->a = cpu->temp & 0x00ff;
else
cpu_bus_write(cpu, cpu->addr_abs, cpu->temp & 0x00ff);
return 0;
}
// Instruction: No Operation
uint8_t cpu_instruction_operation_nop(CPU* cpu)
{
switch (cpu->opt_code) {
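	// These multi-byte NOP variants can take an extra clock cycle, so signal the possibility by returning 1.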
case 0x1C:
case 0x3C:
case 0x5C:
case 0x7C:
case 0xDC:
case 0xFC:
return 1;
}
return 0;
}
// Instruction: Bitwise Logic OR
// Function: A = A | M
// Flags Out: N, Z
uint8_t cpu_instruction_operation_ora(CPU* cpu)
{
fetch(cpu);
cpu->registery->a |= cpu->alu;
cpu_status_set(cpu, FLAG_Z, cpu->registery->a == 0x00);
cpu_status_set(cpu, FLAG_N, (cpu->registery->a & 0x80) > 0);
return 1;
}
// Instruction: Push Accumulator to Stack
// Function: A -> stack
uint8_t cpu_instruction_operation_pha(CPU* cpu)
{
cpu_stack_push(cpu, cpu->registery->a);
return 0;
}
// Instruction: Push Status Register to Stack
// Function: status -> stack
// Note: Break flag is set to 1 before push
uint8_t cpu_instruction_operation_php(CPU* cpu)
{
cpu_bus_write(cpu, STACK_POSITION_START + cpu->registery->sp, cpu->registery->s | FLAG_B | FLAG_U);
cpu_status_set(cpu, FLAG_B, FALSE);
cpu_status_set(cpu, FLAG_U, FALSE);
cpu->registery->sp--;
return 0;
}
// Instruction: Pop Accumulator off Stack
// Function: A <- stack
// Flags Out: N, Z
uint8_t cpu_instruction_operation_pla(CPU* cpu)
{
cpu->registery->a = cpu_stack_pop(cpu);
cpu_status_set(cpu, FLAG_U, TRUE);
cpu_status_set(cpu, FLAG_Z, cpu->registery->a == 0x00);
cpu_status_set(cpu, FLAG_N, (cpu->registery->a & 0x80) > 0);
return 0;
}
// Instruction: Pop Status Register off Stack
// Function: Status <- stack
uint8_t cpu_instruction_operation_plp(CPU* cpu)
{
cpu->registery->s = cpu_stack_pop(cpu);
cpu_status_set(cpu, FLAG_U, TRUE);
return 0;
}
// Instruction: Rotate One Bit Left (Memory or Accumulator)
// C <- [76543210] <- C
uint8_t cpu_instruction_operation_rol(CPU* cpu)
{
fetch(cpu);
cpu->temp = cpu->alu << 1;
if (is_true(cpu_status_get(cpu, FLAG_C)))
cpu->temp |= 0x01;
cpu_status_set(cpu, FLAG_C, (cpu->temp & 0xff00) > 0);
cpu_status_set(cpu, FLAG_Z, (cpu->temp & 0x00ff) == 0x00);
cpu_status_set(cpu, FLAG_N, (cpu->temp & 0x0080) > 0);
if (lookup[cpu->opt_code].addr_mode == &cpu_instruction_addressing_imp)
cpu->registery->a = cpu->temp & 0x00ff;
else
cpu_bus_write(cpu, cpu->addr_abs, cpu->temp & 0x00ff);
return 0;
}
// Instruction: Rotate One Bit Right (Memory or Accumulator)
// C -> [76543210] -> C
uint8_t cpu_instruction_operation_ror(CPU* cpu)
{
fetch(cpu);
cpu->temp = cpu->alu >> 1;
if (is_true(cpu_status_get(cpu, FLAG_C)))
cpu->temp |= 0x0080;
cpu_status_set(cpu, FLAG_C, (cpu->alu & 0x01) > 0);
cpu_status_set(cpu, FLAG_Z, (cpu->temp & 0x00ff) == 0x00);
cpu_status_set(cpu, FLAG_N, (cpu->temp & 0x0080) > 0);
if (lookup[cpu->opt_code].addr_mode == &cpu_instruction_addressing_imp)
cpu->registery->a = cpu->temp & 0x00ff;
else
cpu_bus_write(cpu, cpu->addr_abs, cpu->temp & 0x00ff);
return 0;
}
// Instruction: Return from Interrupt
// Function: pull SR, pull PC
uint8_t cpu_instruction_operation_rti(CPU* cpu)
{
cpu->registery->s = cpu_stack_pop(cpu);
cpu->registery->s &= ~FLAG_B;
cpu->registery->s &= ~FLAG_U;
cpu->registery->pc = cpu_stack_pop(cpu);
cpu->registery->pc |= cpu_stack_pop(cpu) << 8;
return 0;
}
// Instruction: Return from Subroutine
// Function: pull PC, PC+1 -> PC
uint8_t cpu_instruction_operation_rts(CPU* cpu)
{
cpu->registery->pc = cpu_stack_pop(cpu);
cpu->registery->pc |= cpu_stack_pop(cpu) << 8;
cpu->registery->pc++;
return 0;
}
// Instruction: Set Carry Flag
// Function: C = 1
uint8_t cpu_instruction_operation_sec(CPU* cpu)
{
cpu_status_set(cpu, FLAG_C, TRUE);
return 0;
}
// Instruction: Set Decimal Flag
// Function: D = 1
uint8_t cpu_instruction_operation_sed(CPU* cpu)
{
cpu_status_set(cpu, FLAG_D, TRUE);
return 0;
}
// Instruction: Set Interrupt Disable Flag (disables IRQs)
// Function: I = 1
uint8_t cpu_instruction_operation_sei(CPU* cpu)
{
cpu_status_set(cpu, FLAG_I, TRUE);
return 0;
}
// Instruction: Store Accumulator at Address
// Function: M = A
uint8_t cpu_instruction_operation_sta(CPU* cpu)
{
cpu_bus_write(cpu, cpu->addr_abs, cpu->registery->a);
return 0;
}
// Instruction: Store X Register at Address
// Function: M = X
uint8_t cpu_instruction_operation_stx(CPU* cpu)
{
cpu_bus_write(cpu, cpu->addr_abs, cpu->registery->x);
return 0;
}
// Instruction: Store Y Register at Address
// Function: M = Y
uint8_t cpu_instruction_operation_sty(CPU* cpu)
{
cpu_bus_write(cpu, cpu->addr_abs, cpu->registery->y);
return 0;
}
// Instruction: Transfer Accumulator to X Register
// Function: X = A
// Flags Out: N, Z
uint8_t cpu_instruction_operation_tax(CPU* cpu)
{
cpu->registery->x = cpu->registery->a;
cpu_status_set(cpu, FLAG_Z, cpu->registery->x == 0x00);
cpu_status_set(cpu, FLAG_N, (cpu->registery->x & 0x80) > 0);
return 0;
}
// Instruction: Transfer Accumulator to Y Register
// Function: Y = A
// Flags Out: N, Z
uint8_t cpu_instruction_operation_tay(CPU* cpu)
{
cpu->registery->y = cpu->registery->a;
cpu_status_set(cpu, FLAG_Z, cpu->registery->y == 0x00);
cpu_status_set(cpu, FLAG_N, (cpu->registery->y & 0x80) > 0);
return 0;
}
// Instruction: Transfer Stack Pointer to X Register
// Function: X = stack pointer
// Flags Out: N, Z
uint8_t cpu_instruction_operation_tsx(CPU* cpu)
{
cpu->registery->x = cpu->registery->sp;
cpu_status_set(cpu, FLAG_Z, cpu->registery->x == 0x00);
cpu_status_set(cpu, FLAG_N, (cpu->registery->x & 0x80) > 0);
return 0;
}
// Instruction: Transfer X Register to Accumulator
// Function: A = X
// Flags Out: N, Z
uint8_t cpu_instruction_operation_txa(CPU* cpu)
{
cpu->registery->a = cpu->registery->x;
cpu_status_set(cpu, FLAG_Z, cpu->registery->a == 0x00);
cpu_status_set(cpu, FLAG_N, (cpu->registery->a & 0x80) > 0);
return 0;
}
// Instruction: Transfer X Register to Stack Pointer
// Function: stack pointer = X
uint8_t cpu_instruction_operation_txs(CPU* cpu)
{
cpu->registery->sp = cpu->registery->x;
return 0;
}
// Instruction: Transfer Y Register to Accumulator
// Function: A = Y
// Flags Out: N, Z
uint8_t cpu_instruction_operation_tya(CPU* cpu)
{
cpu->registery->a = cpu->registery->y;
cpu_status_set(cpu, FLAG_Z, cpu->registery->a == 0x00);
cpu_status_set(cpu, FLAG_N, (cpu->registery->a & 0x80) > 0);
return 0;
}
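// Instruction: (Unofficial) Load A and X
// Function: A = M, X = M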
uint8_t cpu_instruction_operation_lax(CPU* cpu)
{
cpu_instruction_operation_lda(cpu);
cpu_instruction_operation_tax(cpu);
return 0;
}
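// Instruction: (Unofficial) Store A AND X
// Function: M = A & X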
uint8_t cpu_instruction_operation_sax(CPU* cpu)
{
cpu_bus_write(cpu, cpu->addr_abs, cpu->registery->a & cpu->registery->x);
return 0;
}
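// Instruction: (Unofficial) Decrement then Compare
// Function: M = M - 1, then compare A with M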
uint8_t cpu_instruction_operation_dcp(CPU* cpu)
{
	cpu_instruction_operation_dec(cpu);
	cpu_instruction_operation_cmp(cpu);
	return 0;
}
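// Instruction: (Unofficial) Increment then Subtract with Borrow
// Function: M = M + 1, then A = A - M - (1 - C)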
uint8_t cpu_instruction_operation_isc(CPU* cpu)
{
	cpu_instruction_operation_inc(cpu);
	cpu_instruction_operation_sbc(cpu);
	return 0;
}
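// Instruction: (Unofficial) Rotate Left then AND
// Function: M = ROL(M), then A = A & M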
uint8_t cpu_instruction_operation_rla(CPU* cpu)
{
	cpu_instruction_operation_rol(cpu);
	cpu_instruction_operation_and(cpu);
	return 0;
}
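// Instruction: (Unofficial) Rotate Right then Add with Carry
// Function: M = ROR(M), then A = A + M + C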
uint8_t cpu_instruction_operation_rra(CPU* cpu)
{
	cpu_instruction_operation_ror(cpu);
	cpu_instruction_operation_adc(cpu);
	return 0;
}
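// Instruction: (Unofficial) Shift Left then OR
// Function: M = M << 1, then A = A | M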
uint8_t cpu_instruction_operation_slo(CPU* cpu)
{
	cpu_instruction_operation_asl(cpu);
	cpu_instruction_operation_ora(cpu);
	return 0;
}
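// Instruction: (Unofficial) Shift Right then XOR
// Function: M = M >> 1, then A = A ^ M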
uint8_t cpu_instruction_operation_sre(CPU* cpu)
{
	cpu_instruction_operation_lsr(cpu);
	cpu_instruction_operation_eor(cpu);
	return 0;
}
// Catch-all for the remaining illegal/unimplemented opcodes; treated as a NOP
uint8_t cpu_instruction_operation_xxx(CPU* cpu)
{
return 0;
}
<file_sep>/Emulator.core/src/emulator/cpu/core/Registery.h
#ifndef __EMULATOR_CPU_REGISTRY_H__
#define __EMULATOR_CPU_REGISTRY_H__
#include <inttypes.h>
#include "Flags.h"
typedef struct registery
{
uint8_t a; // accumulator
uint8_t x; // x register
uint8_t y; // y register
flags s; // status register
uint8_t sp; // stack pointer
uint16_t pc; // program counter
} Registery;
Registery* registery_init();
void registery_destroy(Registery* registery);
void status_set(Registery* registery, flag flag, bool value);
bool status_get(const Registery* registery, flag flag);
#endif | 04b550ce452d85eb29fa42dbbf73037ecf507df8 | [
"C"
] | 43 | C | TomKauffeld/NES-Emulator | bc4a1b73d3177c391f24ff696318311e3e4af6f9 | 15f0de15274b9b64455426f0736934b235d3cc3c | |
refs/heads/master | <file_sep>// LOG_LEVELS
// { error: 0, warn: 1, info: 2, verbose: 3, debug: 4, silly: 5 };
require('./lib/heartbeat');
module.exports = require('./lib/logger')();
<file_sep>const winston = require('winston');
const logger = require('./winston');
const graylog = require('winston-tcp-graylog');
if (!process.env.GRAYLOG_HOST) {
logger.info("GRAYLOG_HOST not found. Graylog disabled");
} else {
const graylogOptions = {
gelfPro: {
adapterName: 'tcp',
adapterOptions: {
host: process.env.GRAYLOG_HOST,
port: 12201
}
},
level: process.env.GRAYLOG_LOG_LEVEL || process.env.LOG_LEVEL || 'silly',
timestamp: true,
handleExceptions: true,
prettyPrint: true
};
logger.add(winston.transports.TcpGraylog, graylogOptions);
logger.transports.tcpGraylog
.on('error', (err) => {
console.log(err);
});
}
module.exports = logger;
<file_sep>const log = require('./logger')();
const startDate = new Date();
module.exports = () => {
log.info(process.env.HOSTNAME || "localhost", {
status: 'running',
startedAt: startDate,
uptime: new Date().getTime() - startDate.getTime()
});
};
setInterval(module.exports, 60 * 1000);
<file_sep>const winston = require('winston');
const logger = require('./graylog');
const Sentry = require('winston-sentry');
logger.add(Sentry, {
level: 'warn',
dsn: process.env.SENTRY_DSN,
timestamp: true,
handleExceptions: true,
patchGlobal: true,
tags: {},
extra: {}
});
module.exports = logger;
| 62752d6a6243494124764c01c086e40c24cccf0f | [
"JavaScript"
] | 4 | JavaScript | webus/nodejs-boot | cd2cd8a8b2e5986663ec9d0694526f25a5cee0b9 | 45515ceae73c5d51983b28e7872a2a133492f10f | |
refs/heads/master | <file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
/**
* @Author: <NAME>
* @StudentNumber: 300847239
* @DateCreated: June, 06,2016
* @DateReleased: June, 17,2016
* @Description: This program tests the GiantPlanet and TerrestrialPlanet classes.
* Version #: 0.0.3
*/
namespace Assignment3
{
public class Program
{
/// <summary>
/// Main Method
/// </summary>
/// <param name="args"></param>
public static void Main(string[] args)
{
#region GiantPlanet
Console.BackgroundColor = ConsoleColor.Blue;
Console.ForegroundColor = ConsoleColor.White;
Planet Saturn = new GiantPlanet("Saturn", 95.5, 92.5, "Gas");
Console.WriteLine("************************************************");
Console.WriteLine(Saturn.ToString());
Console.WriteLine("************************************************");
#endregion
Console.WriteLine();
Console.BackgroundColor = ConsoleColor.Red;
#region TerrestrialPlanet
TerrestrialPlanet Nebula = new TerrestrialPlanet("Nebula", 70.2, 80.3, true);
Console.WriteLine("************************************************");
Console.WriteLine(Nebula.ToString());
Console.WriteLine("************************************************");
#endregion
Console.WriteLine();
waitForAnyKey();
}
/// <summary>
/// Utility method to wait for a console key press from the user.
/// </summary>
/// @Returns Void
/// @Method WaitForAnyKey()
public static void waitForAnyKey()
{
/*Console.WriteLine("enter 'x' to exit: ");
Console.WriteLine("Hassan Kobeissi - 300847239");
Console.WriteLine("S - To choose Saturn");
Console.WriteLine("G - To choose Giant Planet");
string choice = Console.ReadLine();
/* switch (choice)
{
case "x":
break;
}*/
Console.WriteLine("Press Any Key to exit: ");
Console.ReadKey();
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
/**
* @Author: <NAME>
* @StudentNumber: 300847239
* @Date Created: June, 06,2016
* @Datee Released: June/17/2016
* @Document Description: Returns a bool data type
* Version #: 0.0.3
*/
namespace Assignment3
{
public interface IHabitable
{
///<summary>abstract method - force children to implement this method</summary>
bool Habitable();
}
}<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
/**
* @Author: <NAME>
* @StudentNumber: 300847239
* @Date Created: June, 06,2016
* @DateReleased: June, 17,2016
* @DocumentDescription: Subclass of Planet Abstract class. Also Implements interfaces.
* Version #: 0.0.3
*/
namespace Assignment3
{
public class TerrestrialPlanet : Planet, IHasMoons, IHabitable
{
#region Private Instance Variable _oxygen
/// <summary>
/// Private Instance Variable _oxygen
/// </summary>
private bool _oxygen;
#endregion
#region Constructor
/// <summary>
/// TerrestrialPlanet Constructor Method
/// </summary>
public TerrestrialPlanet(string name, double diameter, double mass, bool oxygen)
:base(name,diameter,mass)
{
this.Oxygen = oxygen;
}
#endregion
#region Public Methods
/// <summary>
/// A public property Oxygen
/// </summary>
public bool Oxygen
{
get
{
return _oxygen;
}
set
{
_oxygen = value;
}
}
/// <summary>
/// Implementation of the Habitable() method to return true Depending if it's Habitable
public bool Habitable()
{
if (Oxygen == true)
{
return true;
}
else
return false;
}
/// <summary>
/// Implementation of the HasMoons() method to return true Depending on it's Moons
/// </summary>
public bool HasMoons()
{
if (MoonCount > 0)
{
return true;
}
return false;
}
#endregion
#region ToString()
/// <summary>
/// ToString() Override the inherited ToString() method so that it outputs Oxygen.
/// </summary>
/// <returns></returns>
public override string ToString()
{
return base.ToString() + String.Format("\n{0} Has Oxygen? {1}",this.Name, Oxygen);
}
#endregion
}
}<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
/**
* @Author: <NAME>
* @StudentNumber: 300847239
* @DateCreated: June, 06,2016
* @DateRelease: June, 17,2016
* @Description: The Parent Class, Abstract Class Planet. Root of Assignment3.
* Version #: 0.0.3
*/
namespace Assignment3
{
public abstract class Planet
{
#region Private Instance Variables
/*PRIVATE INSTANCE VARIABLES*/
private double _diameter;
private double _mass;
private int _moonCount;
private string _name;
private double _orbitalPeriod;
private int _ringCount;
private double _rotationPeriod;
#endregion
#region public Planet
/// <summary>
/// Planet Constructor Method
/// </summary>
/// <param name="name"></param>
/// <param name="diameter"></param>
/// <param name="mass"></param>
public Planet(string name, double diameter, double mass)
{
this._name = name;
this._diameter = diameter;
this._mass = mass;
}
#endregion
#region Public Properties
/*PUBLIC PROPERTIES*/
public double Diameter
{
get
{
return _diameter;
}
}
public double Mass
{
get
{
return _mass;
}
}
public int MoonCount
{
get
{
return _moonCount;
}
set
{
_moonCount = value;
}
}
public string Name
{
get
{
return _name;
}
}
public double OrbitalPeriod
{
get
{
return _orbitalPeriod;
}
set
{
_orbitalPeriod = value;
}
}
public int RingCount
{
get
{
return _ringCount;
}
set
{
_ringCount = value;
}
}
public double RotationPeriod
{
get
{
return _rotationPeriod;
}
set
{
_rotationPeriod = value;
}
}
#endregion
#region ToString()
/// <summary>
/// Override the inherited ToString() method so that it outputs Name, Diameter and Mass to the Console.
/// </summary>
/// <returns></returns>
public override string ToString()
{
return String.Format("Name of Planet: {0}\nDiameter of {1}: {2}\nMass of {3}: {4}", this.Name, this.Name, this.Diameter, this.Name,this.Mass);
}
#endregion
}
}<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
/**
* @Author: <NAME>
* @StudentNumber: 300847239
* @Date Created: June, 06,2016
* @Datee Released: June/17/2016
* @Description: GiantPlanet sub class of Planet class. Implements IHasmoons and IHasRings interfaces
* Version #: 0.0.3
*/
namespace Assignment3
{
public class GiantPlanet : Planet, IHasMoons, IHasRings
{
/// <summary>
/// Private Instance Variable - Gas or Ice
/// </summary>
private string _type;
#region Public Properties
//Public Properties
public string Type
{
get
{
return _type;
}
set
{
_type = value;
}
}
/// <summary>
/// Implementation of the HasMoons() method to return true Depending on it's Moons
/// </summary>
public bool HasMoons()
{
if(MoonCount > 0)
{
return true;
}
return false;
}
/// <summary>
/// Implementation of the HasRings() method to return true Depending on it's Rings
/// </summary>
public bool HasRings()
{
if(RingCount > 0)
{
return true;
}
return false;
}
#endregion
#region Constructor
/// <summary>
/// This is the GiantPlanet Class Constructor
/// </summary>
/// <param name="name"></param>
/// <param name="diameter"></param>
/// <param name="mass"></param>
/// <param name="type"></param>
public GiantPlanet(string name, double diameter, double mass, string type):base(name,diameter,mass)
{
this.Type = type;
}
#endregion
#region ToString()
/// <summary>
/// ToString() Override the inherited ToString() method so that it outputs Type.
/// </summary>
/// <returns></returns>
public override string ToString()
{
return base.ToString() + String.Format("\n{0} is of Type: {1}",this.Name,Type);
}
#endregion
}
} | 6505d7dd275c31f3e880e1f6b4438f6e6c9f9d58 | [
"C#"
] | 5 | C# | HKobe/Comp123-Assignment3 | 51fc8886227d12ab60ddb4275711b90d4abfa792 | a241b748c0da0d8f7efc0807af85a23a6da2bfea | |
refs/heads/master | <file_sep>import React, { useState } from "react";
import ReactDOM from "react-dom";
import generate from "interjection-js";
import { AwesomeButton } from "react-awesome-button";
import "react-awesome-button/dist/styles.css";
import "./styles.css";
function App() {
const [interjection, setInterjection] = useState("");
return (
<div className="App">
<h1 className="title">Interjection.js</h1>
<p>
{interjection !== "" ? (
<>
<b>{generate()}!</b> You're awesome!
</>
) : (
"😓"
)}
</p>
<AwesomeButton onPress={() => setInterjection(generate())}>
Make me feel special!
</AwesomeButton>
</div>
);
}
const rootElement = document.getElementById("root");
ReactDOM.render(<App />, rootElement);
| 26953a5cbbd6471bea1e254642dcf96b646057cd | [
"JavaScript"
] | 1 | JavaScript | dimitrisraptis96/interjection-js-demo | df73261d979cbc8d90f3e59b483c361b0a1bf24c | 5176593a540c23a4493a095f96be45cc83f436a7 | |
refs/heads/master | <repo_name>LaurieJS/WIA<file_sep>/FollowAlongFiles/jQuery/1-Starting/js/Fade.js
$(function() {
$("#fadeout").click(function(){
$("#theDiv").fadeOut("normal");
});//end of the fadeout click function
$("#fadein").click(function(){
$("#theDiv").fadeIn("normal");
});
$("#fadeto3").click(function(){
$("#theDiv").fadeTo(2000, .3);
});
$("#fadeup").click(function(){
$("#theDiv").fadeTo(2000, 1, function(){
alert("You have located the Div");
});
});
});//end of the ready function<file_sep>/FollowAlongFiles/CreateJS/1-Starting/js/Tween.js
window.onload = function() {
//get canvas element
var canvas = document.getElementById("canvas1");
//Create our stage wrapped around our canvas
var stage = new createjs.Stage(canvas);
//create ball
var ball = new createjs.Shape();
ball.graphics.setStrokeStyle(5, "round");
ball.graphics.beginStroke("black");
ball.graphics.beginFill("red");
ball.graphics.drawCircle(0, 0, 50);
ball.graphics.endStroke();
ball.graphics.endFill();
//draw a line from the center to the outer edge
ball.graphics.ss(1, "round", "round");
ball.graphics.beginStroke("black");
ball.graphics.mt(0,0);
ball.graphics.lt(0,50);
ball.graphics.endStroke();
//move the ball
ball.x = 100;
ball.y = -100;
//create the tween createjs.Tween.get(display object, {optional props: loop, ignorGlobal} duration, easing)
//will loop the file
var tween = createjs.Tween.get(ball, {loop: true})
.to({x:ball.x, y:canvas.height -55, rotation: -360}, 1500, createjs.Ease.bounceOut )
.wait(1000)
.to({x:canvas.width - 55, rotation: 360}, 2500, createjs.Ease.bounceOut)
.wait(1000)
.call(tester)
.to({scaleX:.5, scaleY:.5, x:30, rotation: -360, y:canvas.height -30, alpha: 0}, 2500, createjs.Ease.bounceOut)
;//end of the tween variable
function tester(){
console.log("The tween has reached this point");
}
stage.addChild(ball);
//stage.update();
//create a ticker--auto update the stage
createjs.Ticker.addEventListener("tick", stage);
};<file_sep>/FollowAlongFiles/CreateJS/1-Starting/js/Strokes.js
window.onload = function() {
var canvas = document.getElementById("canvas1");
var stage = new createjs.Stage(canvas);
//set up shape
var shape = new createjs.Shape();
//set up stroke
shape.graphics.beginStroke("purple");
//setup properties
//setStrokeStyle(width, caps, joints, miter)
shape.graphics.setStrokeStyle(10, "round", "round");
//draw a line
shape.graphics.moveTo(25, 25);
shape.graphics.lineTo(250, 25);
shape.graphics.moveTo(50, 50);
//graphics.quadraticCurveTo(cpx, cpy, x, y)(control points, and ending points)
//shape.graphics.quadraticCurveTo(50, 175, 250, 250);
//arcTo(x1, y1, x2, y2, radius)
//shape.graphics.arcTo(100, 50, 100, 500, 60);
//bezier curves
//bezierCurveTo(cp1x, cp1y, cp2x, cp2y, x, y)(1st set of control, second set of cp's, ending point)
shape.graphics.bezierCurveTo(100, 50, 100, 200, 250, 250);
shape.graphics.lineTo(50, 250);
//Add to stage
stage.addChild(shape);
//update the stage
stage.update();
};<file_sep>/FollowAlongFiles/CreateJS/1-Starting/js/Fills.js
window.onload = function() {
//Set up canvas and stage
var canvas = document.getElementById("canvas1");
var stage = new createjs.Stage(canvas);
//Draw a rectangle
var rect1 = new createjs.Shape();
//Create the fill
//rect1.graphics.beginFill("purple");
//rect1.graphics.beginFill("rgb(162, 216, 255)");
//rect1.graphics.beginFill(createjs.Graphics.getRGB(162, 216, 255, .5));
//make a linear gradient [colors][ratio](where to start color)(where to end color)
// [colors] and [ratios] should have the same number of items in array
//rect1.graphics.beginLinearGradientFill(["yellow","purple", "red"], [0, .5, 1], 50, 50, 150, 150);
//create a radial gradient [colors][ratios](x,y,r 1st center point & radius)(x,y,r 2nd center point & radius)
rect1.graphics.beginRadialGradientFill(["yellow", "red"], [0, 1], 150, 150, 0, 150, 150, 150);
//Draw the rectangle
rect1.graphics.rect(50, 50, 100, 100);
//Add to stage
stage.addChild(rect1);
//Update the stage
stage.update();
};<file_sep>/FollowAlongFiles/FollowAlong_Wk1/js/main.js
/*
<NAME>
Web Interaction and Animation
Follow Along-Week 1
July 2014
*/
window.onload = function(){
//Video 1--setting up canvas
//Make a reference to the canvas element
var canvasElem = document.getElementById("canvas1");
//To change the width and height of the canvas
/*canvasElem.width = 700;
canvasElem.height = 300;*/
//Video 16 Animation
if(canvasElem && canvasElem.getContext){
var ctx = canvasElem.getContext("2d");
if(ctx){
//begin here
var srcImg = document.getElementById("img1");
var rotation = 0;
ctx.translate(canvasElem.width/2, canvasElem.height/2);
setInterval(function(){
rotation += 8;
ctx.clearRect(-300, -300, canvasElem.width, canvasElem.height);
ctx.rotate(rotation * (Math.PI/180));
//minus half of the image's width and height to center
ctx.drawImage(srcImg, -75, -75);
}, 500);
}//end of ctx if statement
}//end of canvasElem if statement, end of video 16
/*
//Video 15 Compositing, I used the Compositing files as well
if(canvasElem && canvasElem.getContext){
var ctx = canvasElem.getContext("2d");
if(ctx){
//begin drawing here
ctx.globalAlpha = 1;
// Create an array of rectangles
var rects = [
{x:20, y:20, width:75, height:75 },
{x:40, y:50, width:75, height:75 },
{x:150, y:20, width:75, height:75 },
{x:170, y:50, width:75, height:75 },
{x:280, y:20, width:75, height:75 },
{x:300, y:50, width:75, height:75 },
{x:410, y:20, width:75, height:75 },
{x:430, y:50, width:75, height:75 }
];//end of the rects array
for(var i = 0; i < rects.length; i+=2){
ctx.fillStyle = "blue";
ctx.fillRect(rects[i].x, rects[i].y, rects[i].width, rects[i].height);
ctx.fillStyle = "red";
ctx.fillRect(rects[i+1].x, rects[i+1].y, rects[i+1].width, rects[i+1].height);
//to lower the opacity of the rects
ctx.globalAlpha -=.3;
}//end of the for loop
}//end of the ctx if statement
}//end of the canvasElem if statement
*/
/*
//Video 14 Transforms
if(canvasElem && canvasElem.getContext){
var ctx = canvasElem.getContext("2d");
if(ctx){
ctx.fillStyle = "blue";
ctx.fillRect(150, 50, 100, 50);
ctx.translate(200, 75);
ctx.rotate(.5);
ctx.fillStyle = "red";
ctx.fillRect(-50, -25, 100, 50);
//Make a starburst, rotate transform
var radians = (Math.PI/180) * 20;
ctx.translate(ctx.canvas.width/2, ctx.canvas.height/2);
for(var degrees = 0; degrees < 360; degrees+=20){
ctx.beginPath();
ctx.moveTo(0, 0);
ctx.lineTo(100, 0);
ctx.stroke();
ctx.rotate(radians);
}//end of the for loop
//Scale
ctx.save();
ctx.fillStyle = "blue";
ctx.fillRect(0, 0, 100, 50);
//scale transform
ctx.scale(1.5, 2);
ctx.fillRect(0, 50, 100, 50);
ctx.restore();
//create third rectangle
ctx.fillStyle = "purple";
ctx.scale(.5, .5);
ctx.fillRect(0, 100, 100, 50);
//Diff section
ctx.save();
//begin drawing here
ctx.fillStyle = "blue";
ctx.fillRect(0, 0, 100, 50);
//translate the origin to the middle of the canvas
ctx.translate(ctx.canvas.width/2, ctx.canvas.height/2);
ctx.fillRect(-50, -25, 100, 50);
ctx.restore();
ctx.fillStyle = "red";
ctx.fillRect(0, 0, 100, 50);
}//end of the ctx if statement
}//end of the canvasElem if statement
/*
/*
//Video 13 Clipping Masks
if(canvasElem && canvasElem.getContext){
var ctx = canvasElem.getContext("2d");
if(ctx){
//create variables for x, y, radius, and offset
var x = canvasElem.width/2;
var y = canvasElem.height/2;
var radius = 75;
var offset = 50;
ctx.save();
ctx.beginPath();
ctx.arc(x, y, radius, 0, 2 * Math.PI, false);
ctx.clip();
//draw a blue circle inside the clipping mask
ctx.beginPath();
ctx.arc(x-offset, y-offset, radius, 0, 2 * Math.PI, false);
ctx.fillStyle = "blue";
ctx.fill();
//draw a yellow circle inside the clipping mask
ctx.beginPath();
ctx.arc(x + offset, y, radius, 0, 2 * Math.PI, false);
ctx.fillStyle = "yellow";
ctx.fill();
//draw a red circle inside the clipping mask
ctx.beginPath();
ctx.arc(x, y + offset, radius, 0, 2 * Math.PI, false);
ctx.fillStyle = "red";
ctx.fill();
//to release clipping mask, to get the full stroke
ctx.restore();
//stroke the circle
ctx.beginPath();
ctx.arc(x, y, radius, 0, 2 * Math.PI, false);
ctx.lineWidth = 10;
ctx.strokeStyle = "blue";
ctx.stroke();
//begin drawing here
var srcImg = document.getElementById("img1");
//The image of the train
//ctx.drawImage(srcImg, 0, 0);
//create a circle clipping path
//ctx.beginPath();
//ctx.arc(ctx.canvas.width/2, ctx.canvas.height/2, 250, 0, 2 * Math.PI);
//ctx.clip();
//The image of the train, moved the train here to be clipped
// ctx.drawImage(srcImg, 0, 0);
//Create a path shape clipping mask
ctx.beginPath();
ctx.moveTo(105, 200);
ctx.lineTo(250, 200);
ctx.lineTo(525, 50);
ctx.lineTo(475, 285);
ctx.closePath();
ctx.clip();
//The image of the train, moved the train here to be clipped
ctx.drawImage(srcImg, 0, 0);
}//end of the ctx if statement
}//end of the canvasElem if statement
*/
/*
//Video 12 Using Images
if(canvasElem && canvasElem.getContext){
var ctx = canvasElem.getContext("2d");
if(ctx){
//begin drawing here
//Variable to hold the image
var srcImg = document.getElementById("img1");
//Draw image directly onto the canvas
//ctx.drawImage(srcImg, 0, 0);
//draw a scaled down drawImage(srcImg, dx, dy, dw, dh)
//ctx.drawImage(srcImg, 50, 50, 240, 300);
//drawing a slice of an image drawImage(srcImg, sx, sy, sw, sh, dx, dy, dw, dh)
//ctx.drawImage(srcImg, 285, 40, 95, 140, 50, 50, 190, 280);
//create a variable that holds a video
var srcVid = document.getElementById("vid1");
srcVid.play();
//setInterval(function, time-ms);
setInterval(function(){
ctx.drawImage(srcVid, 0, 0, 480, 270);
}, 30);//end of the setInterval
}//end of the ctx if statement
}//end of the canvasElem if statement
*/
/*
//Video 11-Creating Gradients
if(canvasElem && canvasElem.getContext){
var ctx = canvasElem.getContext("2d");
if(ctx){
//Begin drawing here
//create a linear gradient createLinearGradient(x0, y0, x1, y1)
var linGradient = ctx.createLinearGradient(20, 20, 20, 280);
//Adding color stops
linGradient.addColorStop(0, "#f00");
linGradient.addColorStop(.5, "#00f");
linGradient.addColorStop(1, "#0f0");
//create a rectangle
ctx.fillStyle = linGradient;
ctx.fillRect(10, 10, 200, 280);
//create a radial gradient (x0, y0, r0, x1, y1, r1)
var radGradient = ctx.createRadialGradient(525, 150, 20, 525, 150, 100);
//add color stops
radGradient.addColorStop(0, "#f00");
radGradient.addColorStop(.5, "#00f");
radGradient.addColorStop(1, "#0f0");
ctx.fillStyle = radGradient;
ctx.beginPath();
ctx.arc(525, 150, 100, 0, 2 * Math.PI);
ctx.fill();
}//end of the ctx if statement
}//end of the canvasElem if statement
*/
/*
//Video 10 Drawing Using Patterns
if(canvasElem && canvasElem.getContext){
var ctx = canvasElem.getContext("2d");
if(ctx){
//Begin here
//create a pattern from an image
var patImage = new Image();
//When the image loads, use the pattern
patImage.onload = function(){
ctx.fillStyle = ctx.createPattern(patImage, "repeat");
ctx.fillRect(0, 0, ctx.canvas.width, ctx.canvas.height);
};//end of the onload function
//Add an image--just downloaded an image of blocks
patImage.src ="images/blocks.png";
//create a pattern from another canvas
var patCanvas = document.getElementById("canvas2");
var patCtx = patCanvas.getContext("2d");
patCanvas.width = 35;
patCanvas.height = 35;
//draw a red line in our pattern canvas
patCtx.strokeStyle = "red";
patCtx.lineWidth = 1;
patCtx.beginPath();
patCtx.moveTo(0, 0);
patCtx.lineTo(25, 25);
patCtx.stroke();
//use pattern canvas as an outline on the other canvas
var strokePat = ctx.createPattern(patCanvas, "repeat");
ctx.strokeStyle = strokePat;
ctx.lineWidth = 25;
ctx.strokeRect(50, 50, 200, 200);
}//end of the ctx if statement
}//end of the canvasElem if statement
*/
/*
//Video 9 Creating Shadows
if(canvasElem && canvasElem.getContext){
var ctx = canvasElem.getContext("2d");
if(ctx){
//No shadow--for use later
ctx.save();
//var originalShadowColor = ctx.shadowColor;
//Set up the shadow
ctx.shadowColor = "#000000";
ctx.shadowOffsetX = 10;
ctx.shadowOffsetY = 10;
ctx.shadowBlur = 10;
//Draw a simple rectangle
ctx.fillStyle = "blue";
ctx.fillRect(20, 20, 200, 100);
//draw text with a shadow(styles first)
ctx.fillStyle = "green";
ctx.shadowColor = "rgba(0, 100, 100, .5)";
ctx.shadowOffsetX = 5;
ctx.shadowOffsetY = 5;
ctx.shadowBlur = 5;
//text info
ctx.font = "25pt Georgia";
ctx.fillText("Drawing Text On A Canvas", 250, 75);
//Draw a red line with a purple shadow
ctx.lineCap = "round";
ctx.lineWidth = 25;
ctx.shadowColor = "purple";
ctx.shadowOffsetX = -15;
ctx.shadowOffsetY = 5;
ctx.shadowBlur = 15;
ctx.strokeStyle = "red";
ctx.beginPath();
ctx.moveTo(50, 200);
ctx.lineTo(450, 200);
ctx.stroke();
//Rectangle with no shadow
//ctx.shadowColor = originalShadowColor;
//restore to the original w/no shadow
ctx.restore();
ctx.fillStyle = "blue";
ctx.fillRect(20, 225, 200, 50);
}//end of the ctx if statement
}//end of the canvasElem if statement
*/
/*
//Video8 Text Rendering
//text is not affected by CSS rules
if(canvasElem && canvasElem.getContext){
var ctx = canvasElem.getContext("2d");
if(ctx){
var myString = "Drawing text on a Canvas";
//begin drawing here
//fillText(variable, x-right, y-down)
//simple text using defaults
ctx.fillText(myString, 20, 20);
//Text with font and fill styles
ctx.font = "25pt Georgia";
ctx.fillStyle = "blue";
ctx.fillText(myString, 20, 60);
//Text with font, stroke, baseline and fill styles
ctx.font = "32pt Verdana";
ctx.fillStyle = "yellow";
ctx.textBaseline = "middle";
ctx.strokeStyle = "rgba(0, 255, 0, .5)";
ctx.fillText(myString, 20, 160);
ctx.strokeText(myString, 20, 160);
}//end of the ctx if statement
}//end of the canvasElem if statement
*/
/*
//Video 7-Creating complex curves bezier and quadratic curves
if(canvasElem && canvasElem.getContext){
var ctx = canvasElem.getContext("2d");
if(ctx){
//Drawing begins here
ctx.strokeStyle = "blue";
ctx.lineWidth = 5;
//Make a bezier curve
ctx.beginPath();
//starting point
ctx.moveTo(50, 200);
//bezierCurveTo(cx1, cy1(first set of control points), cx2, cy2(second set of control points), x, y(ending coordinates))
ctx.bezierCurveTo(50, 100, 200, 300, 200, 150);
ctx.closePath();
ctx.fill();
ctx.stroke();
//Code for quadraticCurveTo(cx, cy(control points), x, y(ending points))
ctx.beginPath();
//starting point
ctx.moveTo(400, 200);
ctx.quadraticCurveTo(400, 100, 600, 150);
ctx.stroke();
}//end of the ctx if statement
}//end of the canvasElem if statement
*/
/*Video6-Drawing arcs and circles
if(canvasElem && canvasElem.getContext){
var ctx = canvasElem.getContext("2d");
if(ctx){
//Begin drawing here
ctx.strokeStyle = "blue";
ctx.fillStyle = "red";
ctx.lineWidth = 5;
//Stroke a quarter of an arc
ctx.beginPath();
//basic structure of an arc (x, y, r, sA, eA, aC(bool/anti-clockwise))
ctx.arc(50, 150, 100, 1.5 * Math.PI, 2 * Math.PI);
ctx.stroke();
//stroke a 3/4 arc
ctx.beginPath();
ctx.arc(250, 150, 100, 1.5 * Math.PI, Math.PI);
ctx.stroke();
//full circle degrees = 360. Changed to 180 to cut the circle in half
//and closed the path to give a border
var degrees = 180;
var radians = (degrees/180 * Math.PI);
ctx.beginPath();
ctx.arc(550, 150, 100, 0, radians);
ctx.closePath();
ctx.fill();
ctx.stroke();
}//end of ctx if statement
}//end of canvasElem if statement
*/
/*Video 5 Trapezoid
//Video 5 Drawing Paths using fill() and stroke()
if(canvasElem && canvasElem.getContext){
var ctx = canvasElem.getContext("2d");
if(ctx){
//Drawing begins here
ctx.strokeStyle = "blue";
ctx.fillStyle = "orange";
ctx.lineWidth = 5;
//Draw an open path
ctx.beginPath();
ctx.moveTo(25, 175);
ctx.lineTo(50, 25);
ctx.lineTo(125, 50);
ctx.lineTo(175, 175);
//to close the path to create a shape
ctx.closePath();
//to fill the shape
ctx.fill();
ctx.stroke();
}//end of ctx if statement
}//end of canvasElem if statement
*/
/*Fourth Video
//Video 4 Saving and Restoring Canvas States
if (canvasElem && canvasElem.getContext){
var ctx = canvasElem.getContext("2d");
if(ctx){
//drawing begins here
ctx.strokeStyle = "red";
ctx.fillStyle = "yellow";
ctx.lineWidth = 10;
//Draw a rectangle
ctx.fillRect(25, 25, 100, 125);
ctx.strokeRect(25, 25, 100, 125);
//Save the state of the previous styles, or settings
ctx.save();
//Draw a rectangle with different settings
ctx.strokeStyle = "green";
ctx.fillStyle = "blue";
ctx.lineWidth = 5;
ctx.fillRect(175, 25, 100, 125);
ctx.strokeRect(175, 25, 100, 125);
//Restore the saved drawing state, the third rectangle uses the saved styles
ctx.restore();
//Draw third rectangle with the same settings as above
ctx.fillRect(325, 25, 100, 125);
ctx.strokeRect(325, 25, 100, 125);
}//end of ctx if statement
}//end of the canvasElem if statement
*/
/*Info from the 2nd & 3rd videos
//If the canvas element is available (Video 2 lines and strokes)
if(canvasElem && canvasElem.getContext){
//Get the drawing context from the element
var ctx = canvasElem.getContext("2d");
if(ctx){
//draw here
//draw a stroked rectangle--styles first, then the rectangle/video2
ctx.strokeStyle = "orange";
ctx.lineWidth = 5;
ctx.strokeRect(10, 10, 80, 80);
//draw a filled rectangle/video2
ctx.fillStyle = "purple";
ctx.fillRect(200, 10, 80, 80);
//draw a filled and stroked rectangle/video2
ctx.strokeStyle = "red";
ctx.fillStyle = "white";
ctx.lineWidth = 3;
ctx.strokeRect(10, 197, 100, 100);
ctx.fillRect(10, 197, 100, 100);
//clearing a rectangle (to see the background)video2
ctx.clearRect(0, 20, 350, 20);
//drawing lines-video3
//To make different line widths
for(var i = 0; i < 10; i++){
//To being drawing beginPath() is required
ctx.beginPath();
ctx.strokeStyle = "blue";
ctx.lineWidth = 5 + i;
ctx.moveTo(70, 30 + i * 20);
ctx.lineTo(220, 30 + i * 20);
//Stroke is required
ctx.stroke();
}//end of the for loop
}//end of the ctx if statement
}//end of the canvasElem if statement
//Video 3 drawing lines & making a new canvas
//Targeting the canvas2 element
var canvasElem2 = document.getElementById("canvas2");
if(canvasElem2 && canvasElem2.getContext){
var ctx2 = canvasElem2.getContext("2d");
if(ctx2){
//draw here
//Draw guide lines
ctx2.strokeStyle = "red";
ctx2.lineWidth = 1;
ctx2.beginPath();
ctx2.moveTo(50, 25);
ctx2.lineTo(50, 175);
ctx2.moveTo(350, 25);
ctx2.lineTo(350, 175);
ctx2.stroke();
//Lines with different end caps
//Style first--Butt
ctx2.lineWidth = 25;
ctx2.strokeStyle = "black";
//Butt has no edge, and stays within the lines
ctx2.lineCap = "butt";
//Then create the path
ctx2.beginPath();
ctx2.moveTo(50, 50);
ctx2.lineTo(350, 50);
ctx2.stroke();
//Style first--Round
ctx2.lineWidth = 25;
ctx2.strokeStyle = "black";
//Rounded corners extend beyond the guidelines
ctx2.lineCap = "round";
//Then create the path
ctx2.beginPath();
ctx2.moveTo(50, 100);
ctx2.lineTo(350, 100);
ctx2.stroke();
//Style first--Square
ctx2.lineWidth = 25;
ctx2.strokeStyle = "black";
//Square corners extend beyond the guidelines
ctx2.lineCap = "square";
//Then create the path
ctx2.beginPath();
ctx2.moveTo(50, 150);
ctx2.lineTo(350, 150);
ctx2.stroke();
}//end of ctx2 if statement
}//end of canvasElem2 if statement
//Targeting the canvas3 element-video3 drawing Joined Lines
var canvasElem3 = document.getElementById("canvas3");
if(canvasElem3 && canvasElem3.getContext){
var ctx3 = canvasElem3.getContext("2d");
if(ctx3){
//draw here
//guide line, they all seem to extend above the guideline
ctx3.strokeStyle = "magenta";
ctx3.lineWidth = 3;
ctx3.beginPath();
ctx3.moveTo(15, 50);
ctx3.lineTo(490, 50);
ctx3.stroke();
//Joined line style examples
ctx3.lineWidth = 15;
ctx3.strokeStyle = "black";
//Rounded joined lines
ctx3.lineJoin = "round";
ctx3.beginPath();
ctx3.moveTo(25, 150);
ctx3.lineTo(75, 50);
ctx3.lineTo(125, 150);
ctx3.stroke();
//Beveled joined lines
ctx3.lineJoin = "bevel";
ctx3.beginPath();
ctx3.moveTo(175, 150);
ctx3.lineTo(225, 50);
ctx3.lineTo(275, 150);
ctx3.stroke();
//Mitered joined lines
ctx3.lineJoin = "miter";
ctx3.beginPath();
ctx3.moveTo(325, 150);
ctx3.lineTo(375, 50);
ctx3.lineTo(425, 150);
ctx3.stroke();
}//end of the ctx3 if statement
}//end of the canvasElem3 if statement
*/
//Begin video 2--fills and strokes (x, y, width, height)
/* ctx.fillStyle = "rgb (20, 20, 20)";
ctx.fillRect(25, 50, 100, 100);*/
//use rgba for alpha transparency
/*ctx.strokeStyle = "rgba (70, 70, 70, 1)";
ctx.strokeRect(130, 200, 40, 70);*/
};//end of the window onload function
<file_sep>/FollowAlongFiles/CreateJS/1-Starting/js/Setup.js
window.onload = function() {
//create a new stage and wrap it around the existing canvas
var canvas = document.getElementById("canvas1");
var stage = new createjs.Stage(canvas);
//create a blue circle
var circleGraphic = new createjs.Graphics();
circleGraphic.beginFill("blue");
circleGraphic.drawCircle(0, 0, 50);//centers at the origin
//create the shape
//shape is a display object that handles vector graphics
var circleShape = new createjs.Shape(circleGraphic);
//access some of the shape circle properties
circleShape.x = 50;//move the shape 50 on the x-axis
circleShape.y = 50;
//add the shape to the stage's display list
stage.addChild(circleShape);
//To show on screen Need to update the stage
stage.update();
};<file_sep>/FollowAlongFiles/Image Galleries/1-Starting/3-DIYbox/js/lBox.js
window.onload = function() {
$(".lightbox_trigger").click(function(e){
//prevent default
e.preventDefault();
//get the clicked link href
var image_href = $(this).attr("href");
//set up the lightbox if it doesn't exist or use the lightbox
if($("#lightbox").length > 0){
//lightbox div is created
//place the clicked href into an image
$("#content").html('<img src=" ' +image_href + ' " />');
$("#lightbox").slideDown(1000);
}else {
//first time the lightbox div will not exist
//create the lightbox
var lightbox =
'<div id="lightbox">' +
'<p>Click Anywhere to Close</p>' +
'<div id="content">' +
'<img src=" ' + image_href + ' " />' +
'</div>' +
'</div>';
//insert the lightbox HTML into the page
$("body").append(lightbox);
$("#lightbox").hide();
$("#lightbox").slideDown(1000);
}//end of the if/else
});//end of the click function
//setup the click to close
$("#lightbox").live('click', function(){
$("#lightbox").slideUp(1000);
});//end of the live function--inserts into the DOM
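//Note: .live() was removed in jQuery 1.9, so the binding above only works on a pre-1.9 jQuery.
//On a newer jQuery the equivalent is a delegated handler (left commented out here so the
//close handler is not bound twice):
//$(document).on("click", "#lightbox", function(){
//	$("#lightbox").slideUp(1000);
//});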
};<file_sep>/FollowAlongFiles/CreateJS/1-Starting/js/Text.js
window.onload = function() {
var canvas = document.getElementById("canvas1");
var stage = new createjs.Stage(canvas);
//text is a display object
var sentence = new createjs.Text();
//modify the text property
sentence.text = "<NAME> 2016!";
//font property
sentence.font = "bold 30px Times";
//specify font color
sentence.color = "blue";
sentence.x = 150;
sentence.y = 50;
//sentence.rotation = 45;
//textAlign "left, right, center"
sentence.textAlign = "center";
//lineWidth max width of the column of text--like padding
sentence.lineWidth = 150;
//lineHeight
sentence.lineHeight = 35;
//outline (boolean)
sentence.outline = false;
//combine into one line of code (.text, .font, .color)
var sentence2 = new createjs.Text("<NAME> 2020", "bold 30px Times", "blue");
stage.addChild(sentence);
stage.addChild(sentence2);
stage.update();
};<file_sep>/shuck_laurie_Homework1/js/main.js
/*
Name: <NAME>
Date: July 10, 2014
Class & Section: WIA-Section 1
Comments: "HTML5 Canvas Drawing"
*/
/*******************************************
HTML5 Shape Drawing Activity
1. Setup the canvas and 2d context
2. Draw out each shape in the sections below
********************************************/
window.onload = function(){
/*******************************************
FILE SETUP
// Setup up 7 different Canvases in index.html one for each problem.
//Link Modernizr.js
// Link the main.js file
// Setup the call to that canvas and get it's 2d context
//Use Modernizr to verify that your browser supports canvas, include a fallback message
/*******************************************
PART 1
Draw a rectangle starting at point (0 ,0)
That has a width of 50 px and a height of 100px
Set the color of the rectangle to a shade of blue.
Set the stroke color to black and the dimension of the stroke are the same as the rectangle.
Reminder - set the style first then draw.
********************************************/
//In one of the videos it mentioned that we needed to use Modernizr for each section. The user will receive a lot of alerts
//if their browser does not support the Canvas element
if(Modernizr.canvas){
//Set up the canvas and get the context
var canvasRect = document.getElementById("canvas1");
var ctx1 = canvasRect.getContext("2d");
//Styles
ctx1.fillStyle = "#b5d8e2";
ctx1.strokeStyle = "black";
ctx1.lineWidth = 1;
//Draw Rectangle here
ctx1.fillRect(0, 0, 50, 100);
ctx1.strokeRect(0, 0, 50, 100);
//If the user's browser does not support canvas
}else{
//Add a full page image or something
alert("Sorry, your browser does not support the canvas element.");
}//end of the Modernizr.canvas if statement
/*******************************************
PART 2
Draw a circle starting at point (50 ,50)
That has a radius of 20 px
Set the color of the circle to a shade of red and set the alpha to .5
Set the stroke color to black and use a radius of 30px for this circle.
Reminder - set the style first then draw.
Use the arc method
********************************************/
//I was not sure if we were supposed to draw two circles, or a circle with a stroke. I created 2 circles.
if(Modernizr.canvas){
//Set up the canvas and get the context
var canvasCirc = document.getElementById("canvas2");
var ctx2 = canvasCirc.getContext("2d");
//Styles-Outer circle to create a stroke around the inner circle
ctx2.lineWidth = 2;
ctx2.strokeStyle = "black";
ctx2.fillStyle = "black";
//draw outer circle
ctx2.beginPath();
ctx2.arc(50, 50, 30, 0, 2 * Math.PI);
ctx2.fill();
ctx2.stroke();
//Inner circle Style
ctx2.fillStyle = "rgba(242, 34, 62, .5)";
//draw Circle here
ctx2.beginPath();
ctx2.arc(50, 50, 20, 0, 2 *Math.PI);
ctx2.fill();
}else{
//Add fallback content
alert("Sorry, your browser does not support the canvas element.");
}//end of the Modernizer if/else
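//A possible single-circle reading of the same spec (just a sketch, left commented out so the
//two-circle version above stays as submitted): fill one 20px arc with the translucent red,
//then stroke a second 30px arc around the same centre in black.
//ctx2.beginPath();
//ctx2.arc(50, 50, 20, 0, 2 * Math.PI);
//ctx2.fillStyle = "rgba(242, 34, 62, .5)";
//ctx2.fill();
//ctx2.beginPath();
//ctx2.arc(50, 50, 30, 0, 2 * Math.PI);
//ctx2.lineWidth = 2;
//ctx2.strokeStyle = "black";
//ctx2.stroke();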
/*******************************************
PART 3
Practice using Path drawing.
Create a 5-point star shaped pattern using the lineTo method.
Begin this shape at (100, 100)
Height and width and color are up to you.
********************************************/
if(Modernizr.canvas){
//Set up the canvas and get the context
var canvasStar = document.getElementById("canvas3");
var ctx3 = canvasStar.getContext("2d");
//Styles
ctx3.fillStyle = "yellow";
ctx3.strokeStyle = "black";
ctx3.lineWidth = 20;
//Draw Star here
ctx3.beginPath();
ctx3.moveTo(100, 100);
ctx3.lineTo(125, 50);//top point--left
ctx3.lineTo(150, 100);//top point--right
ctx3.lineTo(200, 100);//top-right point--top
ctx3.lineTo(150, 150);//top-right point--bottom
ctx3.lineTo(180, 200);//bottom-right point--top
ctx3.lineTo(125, 175);//bottom-right point--bottom
ctx3.lineTo(70, 200);//bottom-left point--bottom
ctx3.lineTo(100, 150);//bottom-left point-top
ctx3.lineTo(50, 100);//top-left point-bottom
ctx3.closePath();//close path
ctx3.stroke();
ctx3.fill();
}else{
//Add fallback content
alert("Sorry, your browser does not support the canvas element.");
}//end of the Modernizer if/else
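//A reusable, parametric take on the same idea (a sketch only -- never called here, so the
//hand-plotted star above is what actually renders): the 2*points vertices alternate between
//an outer and an inner radius around the centre (cx, cy).
function drawStar(ctx, cx, cy, points, outerRadius, innerRadius){
	ctx.beginPath();
	for(var i = 0; i < points * 2; i++){
		var r = (i % 2 === 0) ? outerRadius : innerRadius;
		var angle = (Math.PI / points) * i - Math.PI/2;
		if(i === 0){
			ctx.moveTo(cx + r * Math.cos(angle), cy + r * Math.sin(angle));
		}else{
			ctx.lineTo(cx + r * Math.cos(angle), cy + r * Math.sin(angle));
		}
	}
	ctx.closePath();
	ctx.fill();
	ctx.stroke();
}
//e.g. drawStar(ctx3, 125, 125, 5, 75, 30); would draw a regular 5-point star.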
/*******************************************
PART 4
Practice drawing with Bezier curves.
Try drawing the top to an umbrella.
This should have one large arc (a half circle) on the top and scalloped edges on the bottom.
Position, height, width and color are your choice.
Do not overlap any other object.
********************************************/
if(Modernizr.canvas){
//Set up the canvas and get the context
var canvasUmbrella = document.getElementById("canvas4");
var ctx4 = canvasUmbrella.getContext("2d");
//Styles
ctx4.fillStyle = "orange";
ctx4.strokeStyle = "black";
ctx4.lineWidth = 3;
//Draw Umbrella top here
ctx4.beginPath();
ctx4.arc(100, 100, 80, 0, Math.PI, true);
//ctx4.stroke();
//Draw bezier curves--a new beginPath() is not needed for each curve: one path can chain
//several bezierCurveTo() calls, so the arc and the scalloped edges below stay in a single fillable path
//ctx4.beginPath();
//ctx4.moveTo(18, 100);
ctx4.bezierCurveTo(40, 80, 50, 80, 70, 100);
//ctx4.stroke();
//ctx4.beginPath();
//ctx4.moveTo(70, 100);
ctx4.bezierCurveTo(90, 80, 100, 80, 125, 100);
//ctx4.stroke();
//ctx4.beginPath();
//ctx4.moveTo(125, 100);
ctx4.bezierCurveTo(140, 80, 150, 80, 182, 100);
ctx4.fill();
ctx4.stroke();
ctx4.closePath();
}else{
//Add fallback content
alert("Sorry, your browser does not support the canvas element.");
}//end of the Modernizer if/else
/*******************************************
PART 5
Practice using text.
Draw text into your canvas. It can say whatever you would like in any color.
********************************************/
if(Modernizr.canvas){
//Set up the canvas and get the context
var canvasText = document.getElementById("canvas5");
var ctx5 = canvasText.getContext("2d");
//variables for text
var myText = "War is a Racket!";
var myText2 = "Bring the troops home now.";
//saving the context to remove shadows for the 2nd sentence
ctx5.save();
//First Sentence Styles
ctx5.font = "25pt Century Gothic";
ctx5.fillStyle = "red";
ctx5.textBaseline = "middle";
ctx5.strokeStyle = "black";
ctx5.shadowColor = "#717171";
ctx5.shadowOffsetX = 1;
ctx5.shadowOffsetY = 3;
ctx5.shadowBlur = 2;
//Draw first sentence
ctx5.fillText(myText, 10, ctx5.canvas.height/2);
ctx5.strokeText(myText, 10, ctx5.canvas.height/2);
//Clear the shadow
ctx5.restore();
//Style the 2nd Sentence
ctx5.font = "15pt Century Gothic";
ctx5.textBaseline = "middle";
//Draw second sentence
ctx5.fillText(myText2, 5, ctx5.canvas.height/2 + 55);
}else{
//Add fallback content
alert("Sorry, your browser does not support the canvas element.");
}//end of the Modernizer if/else
/*******************************************
PART 6
Pixel manipulation.
Draw the image logo.png into the canvas in the following 3 ways.
1. The image exactly as it is.
2. Shrink the image by 50%
3. Slice a section of the logo out and draw that onto the canvas.
Reminder to use the drawImage method for all 3 of the ways.
********************************************/
//window.onload = function(){
if(Modernizr.canvas){
//Set up the canvas and get the context
var canvasImg = document.getElementById("canvas6");
var ctx6 = canvasImg.getContext("2d");
//variable for the image
var srcImg = document.getElementById("img1");
//Normal sized image
ctx6.drawImage(srcImg, 0, 0);
//Scaled image
//ctx6.drawImage(srcImg, 0, 1100, 1450, 544);
ctx6.drawImage(srcImg, 0, 1100, srcImg.width/2, srcImg.height/2);
//Sliced image, I sliced out the word "Web"
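//(slice form of the call: drawImage(img, sx, sy, sWidth, sHeight, dx, dy, dWidth, dHeight))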
ctx6.drawImage(srcImg, 0, 0, 1100, 500, 0, 1600, 400, 200);
}else{
//Add fallback content
alert("Sorry, your browser does not support the canvas element.");
}//end of the Modernizer if/else
/*******************************************
PART 7
Putting it all together.
Using a combination of all the methods.
Create a complex scene.
You must use at least 3 different methods.
********************************************/
//I used images, text, shadows, a shape(rectangle), and a gradient
if(Modernizr.canvas){
//Set up the canvas and get the context
var canvasScene = document.getElementById("canvas7");
var ctx7 = canvasScene.getContext("2d");
//variables for the images
var drPaulImg = document.getElementById("img2");
var crowdImg = document.getElementById("img3");
var stairsImg = document.getElementById("img4");
//Draw images
//Main image
ctx7.drawImage(drPaulImg, 0, 0);
//Inset images
ctx7.drawImage(stairsImg, 0, 0, 350, 350);
ctx7.drawImage(crowdImg, 0, 350, 350, 370);
//create a rectangle with linear gradient
//create a linear gradient createLinearGradient(x0, y0, x1, y1)
var linGradient = ctx7.createLinearGradient(655, 0, 655, 150);
//Adding color stops
linGradient.addColorStop(0, "#cccccc");
linGradient.addColorStop(.2, "#ffffff");
//Style
ctx7.fillStyle = linGradient;
//Draw Rectangle
ctx7.fillRect(350, 0, 610, 150);
//Variables for the text-sentences
var drPaul1 = "My Day with Dr. <NAME>";
var drPaul2 = "April 9, 2014";
//Styles for text
ctx7.font = "25pt Century Gothic";
ctx7.fillStyle = "black";
ctx7.strokeStyle = "black";
ctx7.lineWidth = 2;
//Text Shadow
ctx7.shadowColor = "#717171";
ctx7.shadowOffsetX = 1;
ctx7.shadowOffsetY = 2;
ctx7.shadowBlur = 2;
// Draw text-sentences
ctx7.fillText(drPaul1, 460, 60);
ctx7.strokeText(drPaul1, 460, 60);
ctx7.fillText(drPaul2, 565, 110);
ctx7.strokeText(drPaul2, 565, 110);
}else{
//Add fallback content
alert("Sorry, your browser does not support the canvas element.");
}//end of the Modernizer if/else
};//end of the window onload
<file_sep>/FollowAlongFiles/drawscript/js/main.js
/*window.onload = function(){
var theCanvas = document.getElementById("canvas1");
if(theCanvas && theCanvas.getContext){
var ctx = theCanvas.getContext("2d");
if(ctx){
//begin drawing here; ctx is the context
//code from DrawScript--which is awesome!!!
ctx.fillStyle="rgb(48,115,165)";
ctx.fillRect(53,99,169,93);
ctx.fill();
ctx.fillStyle="rgb(204,77,0)";
ctx.beginPath();
ctx.moveTo(524,182);
ctx.bezierCurveTo(524,244,474,294,412,294);
ctx.bezierCurveTo(350,294,299,244,299,182);
ctx.bezierCurveTo(299,120,350,69,412,69);
ctx.bezierCurveTo(474,69,524,120,524,182);
ctx.fill();
ctx.fillStyle="rgb(255,91,11)";
ctx.beginPath();
ctx.moveTo(410,228);
ctx.lineTo(397,197);
ctx.lineTo(364,191);
ctx.lineTo(389,168);
ctx.lineTo(385,135);
ctx.lineTo(414,152);
ctx.lineTo(445,138);
ctx.lineTo(437,171);
ctx.lineTo(460,196);
ctx.lineTo(426,199);
ctx.lineTo(410,228);
ctx.fill();
ctx.fillStyle="rgb(0,0,0)";
ctx.lineStyle="rgb(0,0,0)";
ctx.lineWidth=4.808443546295166;
ctx.beginPath();
ctx.moveTo(155,149);
ctx.lineTo(155,129);
ctx.bezierCurveTo(155,128,156,127,156,126);
ctx.bezierCurveTo(157,125,158,125,159,125);
ctx.lineTo(169,125);
ctx.bezierCurveTo(169,125,171,124,173,123);
ctx.bezierCurveTo(176,122,178,122,180,121);
ctx.bezierCurveTo(182,121,184,120,187,120);
ctx.lineTo(190,120);
ctx.bezierCurveTo(193,120,196,121,198,123);
ctx.bezierCurveTo(199,125,200,127,200,130);
ctx.lineTo(200,130);
ctx.bezierCurveTo(202,132,202,134,202,136);
ctx.lineTo(202,137);
ctx.bezierCurveTo(203,139,203,140,203,142);
ctx.bezierCurveTo(203,143,203,143,203,144);
ctx.bezierCurveTo(204,146,205,147,205,149);
ctx.bezierCurveTo(205,152,204,153,202,155);
ctx.bezierCurveTo(201,157,199,158,196,158);
ctx.lineTo(191,158);
ctx.bezierCurveTo(192,160,192,162,192,164);
ctx.bezierCurveTo(192,166,192,168,191,170);
ctx.bezierCurveTo(190,171,189,172,188,173);
ctx.bezierCurveTo(186,174,185,174,183,174);
ctx.bezierCurveTo(182,174,181,174,180,173);
ctx.bezierCurveTo(179,172,179,171,178,170);
ctx.bezierCurveTo(178,169,178,168,177,167);
ctx.bezierCurveTo(177,166,177,165,177,165);
ctx.bezierCurveTo(177,164,176,163,176,162);
ctx.bezierCurveTo(175,161,174,160,173,158);
ctx.bezierCurveTo(170,156,169,154,168,153);
ctx.lineTo(159,153);
ctx.bezierCurveTo(158,153,157,153,156,152);
ctx.bezierCurveTo(156,151,155,150,155,149);
ctx.fill();
ctx.stroke();
ctx.fillStyle="rgb(0,0,0)";
ctx.lineStyle="rgb(0,0,0)";
ctx.lineWidth=4.808443546295166;
ctx.beginPath();
ctx.moveTo(160,129);
ctx.bezierCurveTo(159,130,159,130,159,131);
ctx.bezierCurveTo(159,131,159,132,160,132);
ctx.bezierCurveTo(160,133,161,133,161,133);
ctx.bezierCurveTo(162,133,162,133,163,132);
ctx.lineTo(163,131);
ctx.bezierCurveTo(163,130,163,130,163,129);
ctx.bezierCurveTo(162,129,162,129,161,129);
ctx.lineTo(160,129);
ctx.fill();
ctx.stroke();
ctx.fillStyle="rgb(0,0,0)";
ctx.lineStyle="rgb(0,0,0)";
ctx.lineWidth=4.808443546295166;
ctx.beginPath();
ctx.moveTo(168,149);
ctx.lineTo(169,149);
ctx.bezierCurveTo(169,149,169,149,170,150);
ctx.lineTo(171,150);
ctx.bezierCurveTo(171,151,172,151,172,152);
ctx.bezierCurveTo(173,152,173,152,174,153);
ctx.bezierCurveTo(174,154,174,154,175,154);
ctx.bezierCurveTo(175,155,175,155,176,156);
ctx.bezierCurveTo(176,156,176,157,177,157);
ctx.bezierCurveTo(178,158,179,159,179,160);
ctx.bezierCurveTo(180,160,181,162,181,163);
ctx.bezierCurveTo(181,165,182,166,182,167);
ctx.bezierCurveTo(182,168,183,169,183,170);
ctx.bezierCurveTo(185,170,186,169,187,168);
ctx.bezierCurveTo(188,167,188,166,188,164);
ctx.bezierCurveTo(188,162,188,161,187,159);
ctx.bezierCurveTo(186,156,185,155,185,153);
ctx.lineTo(196,153);
ctx.bezierCurveTo(197,153,198,153,199,152);
ctx.bezierCurveTo(200,151,200,150,200,149);
ctx.bezierCurveTo(200,149,200,148,200,147);
ctx.bezierCurveTo(199,146,199,145,198,145);
ctx.lineTo(199,144);
ctx.bezierCurveTo(199,143,199,142,199,142);
ctx.bezierCurveTo(199,140,199,139,197,138);
ctx.bezierCurveTo(198,137,198,137,198,136);
ctx.bezierCurveTo(198,135,198,134,198,133);
ctx.bezierCurveTo(197,133,197,132,196,132);
ctx.bezierCurveTo(196,131,196,130,196,130);
ctx.bezierCurveTo(196,128,196,127,195,126);
ctx.bezierCurveTo(194,125,192,125,190,125);
ctx.lineTo(186,125);
ctx.bezierCurveTo(183,125,180,125,175,127);
ctx.bezierCurveTo(175,127,175,127,174,127);
ctx.lineTo(173,128);
ctx.lineTo(172,128);
ctx.bezierCurveTo(171,128,171,128,171,128);
ctx.bezierCurveTo(170,128,170,129,170,129);
ctx.bezierCurveTo(169,129,169,129,169,129);
ctx.lineTo(168,129);
ctx.lineTo(168,149);
ctx.fill();
ctx.stroke();
}//end of the ctx if statement
}//end of the getContext if statement
};//end of the window.onload function*/
window.onload = function(){
//get the canvas element
var canvas = document.getElementById("canvas1");
//create the stage
var stage = new createjs.Stage(canvas);
//create a graphic
var shape = new createjs.Shape();
//draw shape here
shape.graphics.f("rgba(189,204,212,254)").p("EB82B9KMh8/AAAMAAAh9AMB8/AAAMAAAB9A").cp().ef().f("rgba(122,201,67,254)").p("EADmBi6MAAAhKiYAAmGCMlUEYkYYEYkYFUiMGGAAMBKiAAAYGGAAFUCMEYEYYEYEYCCFUAAGG<KEY>();
//add to the stage
stage.addChild(shape);
//update the stage
stage.update();
};//end of the onload function<file_sep>/FollowAlongFiles/Data Viz/1-Staring Files/3-Chart.js-master/js/LineGraph.js
$(function() {
var canvas = document.getElementById("canvas");
var ctx = canvas.getContext("2d");
var lineChartData = {
labels: ["PB n J", "Hot Dogs", "Top Ramen", "Grilled Cheese", "Hot Pockets", "Mac N Cheese", "Beans N Rice"],
datasets:[
{
fillColor: "rgba(166, 209, 122, .5)",
strokeColor: "rgab(166, 209, 122, 1)",
pointColor: "black",
pointStrokeColor: "#fff",
data:[1, 20, 40, 20, 60, 75, 40]
}//data
]//labels
};//line chart data
var myLine = new Chart(ctx).Line(lineChartData);
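//Note: new Chart(ctx).Line(data) is the Chart.js 1.x style API; Chart.js 2+ replaced it
//with new Chart(ctx, {type: "line", data: ..., options: ...}).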
});
<file_sep>/FollowAlongFiles/jQuery/1-Starting/js/ShowHide.js
$(function() {
//add a click function to the button
//Hide
$("#hide").click(function(){
$("#theDiv").hide("slow", function(){
//alert("The Div has received the invisibility cloak...");
$("#theDiv").css("background-color", "red");
});//end of the hide slow function
});//end of the hide click function
//Show
$("#show").click(function(){
$("#theDiv").show("slow");
});//end of the show click function
//Toggle
$("#toggle").click(function(){
$("#theDiv").toggle("slow");
});//end of the toggle click function
});//end of the ready function
<file_sep>/FollowAlongFiles/CreateJS/1-Starting/js/Bitmap.js
window.onload = function() {
//Canvas & Stage
var canvas=document.getElementById("canvas");
var stage= new createjs.Stage(canvas);
var bmp = new createjs.Bitmap();
//load the image in & wait until it is fully loaded
var img = new Image();
img.src = "images/ship.svg";
img.onload = updateStage;
//updateStage function
function updateStage(e){
bmp = new createjs.Bitmap(e.target);
var centerX = canvas.width/2;
var centerY = canvas.height/2;
//move the ship to the center of the stage
bmp.x = centerX;
bmp.y = centerY;
//change the registration point
bmp.regX = 50;
bmp.regY = 50;
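//regX/regY move the registration point, which is what x/y, rotation and scaleX/scaleY
//are applied around -- so the ship spins about the (50, 50) point of its own bitmap
//instead of its top-left corner.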
//scale it up (times 4)
bmp.scaleX = 4;
bmp.scaleY = 4;
stage.addChild(bmp);
//stage.update();
}//end of the updateStage function
//ticker to control timer
createjs.Ticker.addEventListener("tick", tick);
function tick(e){
bmp.rotation += 2;
bmp.x += 1;
stage.update();
}//end of the tick function
};<file_sep>/FollowAlongFiles/Starting AV Files/3-SoundJS/js/manifest.js
window.onload = function() {
if(!createjs.Sound.initializeDefaultPlugins()){
return;
}//end of the if
//create a variable to hold a path to the audio files
var audioPath = "audio/";
//create an array of objects--each object is a sound, which is called a manifest
var manifest = [
{id: "Music", src: audioPath+"music.mp3|"+audioPath+"music.ogg"},
{id: "Thunder", src: audioPath+"Thunder1.mp3|"+audioPath+"Thunder1.ogg"}
];//end of the array
//register the manifest with Soundjs
createjs.Sound.registerManifest(manifest);
//listen for files to load
createjs.Sound.addEventListener("fileload", handleLoad);
function handleLoad(event){
//This will run once each time the file is initially loaded
//createjs.Sound.play(event.src);
}//end of the handleLoad function
};//end of the window.onload
function playMusic(){
createjs.Sound.play("Music");
}
function playThunder(){
//BRING the Thunder ROFL
createjs.Sound.play("Thunder");
} | 0531b5011ecdc43e802147a4f45fb2ca9f02da85 | [
"JavaScript"
] | 14 | JavaScript | LaurieJS/WIA | 51aa000915d4d4d770ce1e3e488f080351f6a728 | f91cdf55244466f6e1fa5dbd27b16cc1c8026031 | |
refs/heads/master | <file_sep>from django.shortcuts import render
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
from backend.models import Taskinfo
from backend.serializers import TaskinfoSerializer,UserSerializier
from rest_framework.renderers import JSONRenderer
from django.http import HttpResponse, JsonResponse
from rest_framework.views import APIView
from django.http import Http404
from rest_framework import generics
from django.contrib.auth.models import User
from rest_framework import permissions
from backend.permissions import IsOwner
# Create your views here.
'''class Tasklist(APIView):
def get(self,request,format=None):
tasks = Taskinfo.objects.all()
serializier=TaskinfoSerializer(tasks,many=True)
return Response(serializier.data)
def post(self,request,format=None):
serializier=TaskinfoSerializer(data=request.data)
if serializier.is_valid():
serializier.save()
return Response(serializier.data)
return Response(serializier.errors)
class TaskDetail(APIView):
def get_objects(self,task_id):
try:
return Taskinfo.objects.get(pk=task_id)
except Taskinfo.DoesNotExist:
raise Http404
def get(self,request,task_id,format=None):
task = self.get_objects(task_id)
serializer = TaskinfoSerializer(task)
return Response(serializer.data)
def post(self,request,task_id,format=None):
task = self.get_objects(task_id)
serializer = TaskinfoSerializer(task,request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data)
return Response(status=status.HTTP_204_NO_CONTENT)
def delete(self,request,task_id,format=None):
task = self.get_objects(task_id)
task.delete()
return Response(status=status.HTTP_204_NO_CONTENT)'''
class Tasklist(generics.ListCreateAPIView):
permission_classes = [permissions.IsAuthenticated, IsOwner]
serializer_class = TaskinfoSerializer
def get_queryset(self):
"""
This view should return a list of all the tasks
for the currently authenticated user.
"""
user = self.request.user
return Taskinfo.objects.filter(user=user)
def perform_create(self, serializer):
serializer.save(user=self.request.user) #save the task along with its creator
class TaskDetail(generics.RetrieveUpdateDestroyAPIView):
permission_classes = [permissions.IsAuthenticated,IsOwner]
queryset = Taskinfo.objects.all()
serializer_class = TaskinfoSerializer
class UserList(generics.ListAPIView):
queryset = User.objects.all()
serializer_class = UserSerializier
class UserDetail(generics.RetrieveAPIView):
queryset = User.objects.all()
serializer_class = UserSerializier<file_sep>from rest_framework import serializers
from backend.models import Taskinfo
from django.contrib.auth.models import User
class TaskinfoSerializer(serializers.ModelSerializer):
user = serializers.ReadOnlyField(source='user.username')
class Meta:
model = Taskinfo
fields = ('id','task','status','created','user')
class UserSerializier(serializers.ModelSerializer):
tasks = serializers.PrimaryKeyRelatedField(many=True, queryset=Taskinfo.objects.all())
class Meta:
model = User
fields = ('id','username','tasks')
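# For reference, a Taskinfo rendered through TaskinfoSerializer comes out roughly like
# (field values below are made up; "user" is the owner's username because of the
# ReadOnlyField above):
#   {"id": 1, "task": "buy milk", "status": false, "created": "2020-01-01T12:00:00Z", "user": "alice"}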
<file_sep>from django.db import models
# Create your models here.
class Taskinfo(models.Model):
task = models.CharField(max_length=100, blank=True, default='')
status = models.BooleanField(default=False)
created = models.DateTimeField(auto_now_add=True)
user = models.ForeignKey('auth.User',related_name='tasks',on_delete=models.CASCADE,default='')<file_sep>from django.urls import path,include
from backend import views
urlpatterns = [
path('',views.Tasklist.as_view()),
path('detail/<int:pk>',views.TaskDetail.as_view()),
path('users',views.UserList.as_view()),
path('users/<int:pk>',views.UserDetail.as_view())
]
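# Routes defined above (the final URLs also depend on whatever prefix the project-level
# urls.py uses when it includes this module):
#   ''            -> Tasklist   (GET list / POST create, scoped to the authenticated owner)
#   'detail/<pk>' -> TaskDetail (GET / PUT / PATCH / DELETE, guarded by IsOwner)
#   'users'       -> UserList   (GET)
#   'users/<pk>'  -> UserDetail (GET)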
urlpatterns += [
path('api-auth/', include('rest_framework.urls')),
] | 147976b64e47b9e9d0f7e8e0e03895c5e73df229 | [
"Python"
] | 4 | Python | Absalom-Silwal/todo_backend | 88de70ed26da72003c8164884e006257d2a3a60f | d9a6b8037780c6b0b136e2a3891e41a714edbef5 | |
refs/heads/master | <repo_name>LukasGlader/Calendar<file_sep>/date/julian.cpp
#include "julian.h"
#define IGREG (15+31L*(10+12L*1582))
namespace lab2{
Julian::Julian():Gregorian()
{
}
Julian::Julian(const Date& d)
{
julian_day_number = d.julian_day();
}
Julian::Julian(int jdn)
{
julian_day_number = jdn;
}
Julian::Julian(int year, int month, int day)
{
is_valid(day, month, year);
julian_day_number = julian_date_to_JDN(year, month, day);
}
Julian::~Julian()
{
}
double Julian::julian_date_to_JDN(int year, int month, int day) const
{
//std::cout << "Julian->JDN date: " << year <<"-"<<month<<"-"<<day<<" ->";
if(year < 1 ){
year++;
}
if( month <= 2 ){
year--;
month += 12;
}
double jdn = ((floor(365.25*(year+4716)) +
floor(30.6001*(month+1))+day)-1524.5);
//std::cout << (int)jdn << "\n";
return jdn;
}
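//Worked example: Julian-calendar 2000-01-01 -> floor(365.25*(1999+4716)) + floor(30.6001*14) + 1 - 1524.5
//= 2452653 + 428 + 1 - 1524.5 = 2451557.5, i.e. 13 days after Gregorian 2000-01-01
//(JDN 2451544.5), which matches the Julian/Gregorian offset for that era.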
day_month_year Julian::JDN_to_julian(double julian) const
{
//std::cout << "\tJDN->Julian JDN: " << (int)julian << " -> ";
day_month_year result;
int day, month, year;
int b = JDN_to_mod_julian_day(julian) + 2401525;
int c = (int)((b - 122.1) / 365.25);
int da = (int)(365.25 * c);
int e = (int)((b - da) / 30.6001);
month =(int)( (e < 14) ? (e - 1) : (e - 13) );
year = (int)( (month > 2 ) ? (c - 4716) : (c - 4715));
day = (int)(b-da-floor(30.6001*e));
//std::cout << year <<"-"<<month<<"-"<<day<<"\n";
result.day = day;
result.month = month;
result.year = year;
return result;
}
const int Julian::year() const
{
day_month_year date = JDN_to_julian(julian_day_number);
return date.year;
}
const int Julian::month() const
{
day_month_year date = JDN_to_julian(julian_day_number);
return date.month;
}
const int Julian::day() const
{
day_month_year date = JDN_to_julian(julian_day_number);
return date.day;
}
Date& Julian::add_year(int n)
{
if(is_leap_day())
{
if(is_leap_year(year() + n))
{
julian_day_number = julian_date_to_JDN(
year() + n, month(), day());
}else //jumping from leap day to non leap day.
{
julian_day_number = julian_date_to_JDN(
year() + n, 2, 28);
}
}else
{
julian_day_number = julian_date_to_JDN(
year() + n, month(), day());
}
return (*this);
}
Date& Julian::sub_month()
{
int next_year = year();
int next_month = month();
if(month() == 1)
{
next_month = 12;
next_year--;
}else
{
next_month--;
}
if(days_in_month(next_year, next_month) >= day())
{
julian_day_number = julian_date_to_JDN(next_year, next_month, day());
}else
{
julian_day_number -= 30;
}
return (*this);
}
Date& Julian::add_month()
{
int next_year = year();
int next_month = month();
if(month() == 12)
{
next_month = 1;
next_year++;
}else
{
next_month++;
}
if(days_in_month(next_year, next_month) >= day())
{
julian_day_number = julian_date_to_JDN(next_year, next_month, day());
}else
{
julian_day_number += 30;
}
return (*this);
}
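//is_leap_year() below uses the Julian-calendar rule: every fourth year, with no century
//exception -- e.g. 1900 counts as a leap year here even though it does not in the Gregorian calendar.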
bool Julian::is_leap_year(int year) const
{
return (year % 4 == 0);
}
const int Julian::week_day()const{
// Julian
int m = month();
int y = year();
if (m < 3){
m += 12;
y -=1;
}
int k = y % 100;
int j = y /100;
int h = ((day() + (int)(((m+1)*26)/10) + k + (int)(k/4) + 5 + (6*j))) % 7;
h -= 1;
if(h == 0){
h = 7;
}
if(h == -1){
h = 6;
}
return h;
}
}
<file_sep>/cal/date.cpp
#include "date.h"
namespace lab2{
Date::Date(void)
{
}
Date::~Date(void)
{
}
std::ostream& operator<<(std::ostream & os, const Date& d){
os << d.year() << "-";
int month = d.month();
if(month < 10)
os << "0";
os << month << "-";
int day = d.day();
if(day < 10)
os << "0";
os << day;
return os;
}
const int Date::operator-(const Date& d) const{
return (mod_julian_day() - d.mod_julian_day());
}
const bool Date::operator==(const Date& d) const{
return (mod_julian_day() == d.mod_julian_day());
}
const bool Date::operator!=(const Date& d) const{
return (mod_julian_day() != d.mod_julian_day());
}
const bool Date::operator<(const Date& d) const{
return (mod_julian_day() < d.mod_julian_day());
}
const bool Date::operator<=(const Date& d) const{
return (mod_julian_day() <= d.mod_julian_day());
}
const bool Date::operator>(const Date& d) const{
return (mod_julian_day() > d.mod_julian_day());
}
const bool Date::operator>=(const Date& d) const{
return (mod_julian_day() >= d.mod_julian_day());
}
}
<file_sep>/old/julian.cpp
#include "julian.h"
namespace lab2 {
Julian::Julian(int year, int month, int day) {
checkValid(year, month, day);
julianDayNumber = julianDateToJDN(year, month, day);
jdnToJulianDate(julianDayNumber);
}
Julian::Julian(Date*& other) {
jdnToJulianDate(other->mod_julian_day() + 2400000);
}
Julian::Julian(Date* other) {
jdnToJulianDate(other->mod_julian_day() + 2400000);
}
Julian::Julian(const Date& other) {
jdnToJulianDate(other.mod_julian_day() + 2400000);
}
Julian::Julian(int jdn) {
jdnToJulianDate(jdn);
}
Julian::Julian():Gregorian() {
jdnToJulianDate(julianDayNumber);
}
Julian::~Julian(void) {
}
const int Julian::days_this_month() const {
return days_this_month(currentYear, currentMonth);
}
int Julian::days_this_month(int year, int month) const{
if(month == 0) month = 12;
int result = 0;
if(month == 2 && isLeapYear(year)){
result = 29;
}else{
result = daysInMonth[month];
}
return result;
}
//prefix
Julian& Julian::operator++() {
julianDayNumber++;
jdnToJulianDate(julianDayNumber);
return *this;
}
Julian& Julian::operator--() {
julianDayNumber--;
jdnToJulianDate(julianDayNumber);
return *this;
}
const int Julian::week_day()const{
int m = month();
int y = year();
if (m < 3){
m += 12;
y -=1;
}
int k = y % 100;
int j = y /100;
int h = ((day() + (int)(((m+1)*26)/10) + k + (int)(k/4) + 5 + (6*j))) % 7;
h -= 1;
if(h == 0){
h = 7;
}
if(h == -1){
h = 6;
}
return h;
}
/**
* Converts a julian date to a Julian Day Number
**/
int Julian::julianDateToJDN(int year, int month, int day) {
if(year < 1 ){
year++;
}
if( month <= 2 ){
year--;
month += 12;
}
return ((floor((365.25)*(year+4716)) +
floor(30.6001*(month+1))+day)-1524.5);
}
bool Julian::isLeapYear(int year) const {
if(year % 4 == 0){
return true;
}else{
return false;
}
}
/**
* Converts a Julian Day Number to a Julian Date
**/
void Julian::jdnToJulianDate(int jdn) {
int b = mod_julian_day() + 2401525;
int c = (int)((b - 122.1) / static_cast<double>(365.25));
int da = (int)(static_cast<double>(365.25) * c);
int e = (int)((b - da) / 30.6001);
currentMonth = (int)( (e < 14) ? (e - 1) : (e - 13) );
currentYear = (int)( (currentMonth > 2 ) ? (c - 4716) : (c - 4715));
currentDay = (int)(b-da-floor(30.6001*e));
}
void Julian::operator+=(int n) {
julianDayNumber += n;
jdnToJulianDate(julianDayNumber);
}
void Julian::operator-=(int n) {
julianDayNumber -= n;
jdnToJulianDate(julianDayNumber);
}
Julian& Julian::operator=(const Date& d)
{
julianDayNumber = d.julianDayNumber;
updateDateWithJDN(julianDayNumber);
return *this;
}
void Julian::updateDateWithJDN(int jdn) {
jdnToJulianDate(jdn);
return;
}
Date& Julian::add_year(int n) {
if(currentDay > days_this_month(currentYear+n, currentMonth))
{
julianDayNumber = julianDateToJDN(currentYear + n, currentMonth, days_this_month(currentYear + n,currentMonth));
}else
{
julianDayNumber = julianDateToJDN(currentYear + n, currentMonth, currentDay);
}
jdnToJulianDate(julianDayNumber);
return *this;
}
Date& Julian::add_month(int n) {
int tmpYear = currentYear;
int tmpMonth = currentMonth + n;
tmpYear += tmpMonth/12;
tmpMonth = tmpMonth % 12;
if(tmpMonth <= 0)
{
tmpMonth = 12 + tmpMonth;
--tmpYear;
}
if(currentDay > days_this_month(tmpYear,tmpMonth))
{
julianDayNumber = julianDateToJDN(tmpYear, tmpMonth, days_this_month(tmpYear,tmpMonth));
}else
{
julianDayNumber = julianDateToJDN(tmpYear, tmpMonth, currentDay);
}
jdnToJulianDate(julianDayNumber);
return *this;
}
}
<file_sep>/old/julian.h
#pragma once
#include <math.h>
#include "gregorian.h"
#include <math.h>
namespace lab2{
class Julian :
public Gregorian
{
public:
Julian(void);
Julian(int); //JDN Constructor
Julian(Date*);
Julian(Date*&);
Julian(const Date&);
Julian(int year, int month, int day);
~Julian(void);
const virtual int week_day() const;
const virtual int days_this_month() const;
int days_this_month(int year, int month) const;
void jdnToJulianDate(int jdn);
int julianDateToJDN(int year, int month, int day);
//prefix
virtual Julian& operator++();
virtual Julian& operator--();
Julian& operator=(const Date& d);
void updateDateWithJDN(int jdn);
virtual void operator+=(int n);
virtual void operator-=(int n);
virtual Date& add_year(int n = 1);
virtual Date& add_month(int n = 1);
private:
protected:
virtual bool isLeapYear(int year) const;
};
}
<file_sep>/old/date.h
#pragma once
#include <string>
#include <iostream>
namespace lab2{
class Date
{
public:
Date(void);
~Date(void);
const virtual int year() const = 0;
const virtual int month() const = 0;
const virtual int day() const = 0;
const virtual int week_day() const = 0;
const virtual int days_per_week() const = 0;
const virtual int days_this_month() const = 0;
const virtual int months_per_year() const = 0;
const virtual std::string week_day_name() const = 0;
const virtual std::string month_name() const = 0;
//prefix
virtual Date& operator++() = 0;
virtual Date& operator--() = 0;
virtual void operator+=(int n) = 0;
virtual void operator-=(int n) = 0;
int julianDayNumber;
virtual Date& add_year(int n = 1) = 0;
virtual Date& add_month(int n = 1) = 0;
virtual int mod_julian_day() const = 0;
const virtual bool operator==(const Date& d) const;
const virtual bool operator!=(const Date& d) const;
const virtual bool operator<(const Date& d) const;
const virtual bool operator<=(const Date& d) const;
const virtual bool operator>(const Date& d) const;
const virtual bool operator>=(const Date& d) const;
const virtual int operator-(const Date& d) const;
};
std::ostream& operator<<(std::ostream & os, const Date& d);
}
<file_sep>/cal/gregorian.cpp
#include "gregorian.h"
namespace lab2{
//spot 0 empty to avoid having to do index arithmetic later
const std::string Gregorian::day_names[8] =
{"INVALID_DAY",
"monday",
"tuesday",
"wednesday",
"thursday",
"friday",
"saturday",
"sunday"};
const std::string Gregorian::month_names[13] =
{"INVALID_MONTH",
"january",
"february",
"march",
"april",
"may",
"june",
"july",
"august",
"september",
"october",
"november",
"december"};
const int Gregorian::days_in_month_array[13] =
{0,31,28,31,30,31,30,31,31,30,31,30,31};
Gregorian::Gregorian()
{
time_t t;
k_time(&t); //time(t);
julian_day_number = (t/86400)+ 2440587.5;
}
Gregorian::Gregorian(const Date& d)
{
julian_day_number = d.julian_day();
}
Gregorian::Gregorian(int jdn)
{
julian_day_number = jdn;
}
Gregorian::Gregorian(int year, int month, int day)
{
is_valid(day, month, year);
julian_day_number = gregorian_date_to_JDN(year, month, day);
}
Gregorian::~Gregorian()
{
}
int Gregorian::mod_julian_day() const
{
return (int)JDN_to_mod_julian_day(julian_day_number);
}
double Gregorian::JDN_to_mod_julian_day(double jdn) const
{
return (jdn - 2400000.5);
}
double Gregorian::julian_day() const
{
return julian_day_number;
}
double Gregorian::gregorian_date_to_JDN(int y, int m, int d) const
{
return (1721425.5 - 1) +
(365 * (y - 1)) +
(int)((y - 1) / 4) +
(-(int)((y - 1) / 100)) +
(int)((y - 1) / 400) +
(int)((((367 * m) - 362) / 12) +
((m <= 2) ? 0 :
(is_leap_year(y) ? -1 : -2)
) +
d);
}
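//Worked example: 2000-01-01 -> 1721424.5 + 365*1999 + 499 - 19 + 4 + 1 = 2451544.5,
//the Julian Day Number for midnight at the start of that Gregorian date.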
day_month_year Gregorian::JDN_to_gregorian(double jd) const
{
day_month_year result;
int y, m, d;
double j;
j = jd - 1721119 + 0.5 ;
y = (int)((4 * j - 1) / 146097 );
j = 4 * j - 1 - 146097 * y ;
d = (int)(j / 4) ;
j = (4 * d + 3) / 1461 ;
d = 4 * d + 3 - 1461 * j ;
d = (d + 4) / 4 ;
m = (5 * d - 3) / 153 ;
d = 5 * d - 3 - 153 * m ;
d = (d + 5) / 5 ;
y = 100 * y + j ;
if(m < 10)
{
m = m + 3;
}else
{
m = m - 9 ; y = y + 1;
}
result.day = d;
result.month = m;
result.year = y;
return result;
}
const int Gregorian::year() const
{
day_month_year date = JDN_to_gregorian(julian_day_number);
return date.year;
}
const int Gregorian::month() const
{
day_month_year date = JDN_to_gregorian(julian_day_number);
return date.month;
}
const int Gregorian::day() const
{
day_month_year date = JDN_to_gregorian(julian_day_number);
return date.day;
}
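//week_day() below is Zeller's congruence, with the result remapped so that
//1 = monday ... 7 = sunday to line up with the day_names table above.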
const int Gregorian::week_day() const
{
int m = month();
int y = year();
if (m < 3){
m += 12;
y -=1;
}
int k = y % 100;
int j = y /100;
int h = ((day() + (int)(((m+1)*26)/10) + k + (int)(k/4) + (int)(j/4) + (5*j))) % 7;
h -= 1;
if(h == 0){
h = 7;
}
if(h == -1){
h = 6;
}
return h;
}
const int Gregorian::days_per_week() const
{
return 7;
}
const int Gregorian::days_this_month() const
{
return days_in_month(year(), month());
}
const int Gregorian::months_per_year() const
{
return 12;
}
int Gregorian::days_in_month(int year, int month) const
{
if(month == 0) month = 12;
int result = 0;
if(month == 2 && is_leap_year(year)){
result = 29;
}else{
result = days_in_month_array[month];
}
return result;
}
const std::string Gregorian::week_day_name() const
{
return day_names[week_day()];
}
const std::string Gregorian::month_name() const
{
return month_names[month()];
}
//prefix
Date& Gregorian::operator++()
{
julian_day_number++;
return *this;
}
Date& Gregorian::operator--()
{
julian_day_number--;
return *this;
}
//postfix
Gregorian Gregorian::operator++(int n)
{
Gregorian old(year(), month(), day());
++(*this);
return old;
}
Gregorian Gregorian::operator--(int n)
{
Gregorian old(year(), month(), day());
--(*this);
return old;
}
void Gregorian::operator+=(int n)
{
julian_day_number+= n;
}
void Gregorian::operator-=(int n)
{
julian_day_number-= n;
}
Gregorian& Gregorian::operator=(const Date& d)
{
int jdn = d.julian_day();
julian_day_number = jdn;
return *this;
}
Date& Gregorian::add_year(int n)
{
if(is_leap_day())
{
if(is_leap_year(year() + n))
{
julian_day_number = gregorian_date_to_JDN(
year() + n, month(), day());
}else //jumping from leap day to non leap day.
{
julian_day_number = gregorian_date_to_JDN(
year() + n, 2, 28);
}
}else
{
julian_day_number = gregorian_date_to_JDN(
year() + n, month(), day());
}
return (*this);
}
Date& Gregorian::add_year()
{
add_year(1);
return (*this);
}
bool Gregorian::is_leap_day() const
{
return (month() == 2 && day() == 29);
}
Date& Gregorian::sub_month()
{
int next_year = year();
int next_month = month();
if(month() == 1)
{
next_month = 12;
next_year--;
}else
{
next_month--;
}
if(days_in_month(next_year, next_month) >= day())
{
julian_day_number = gregorian_date_to_JDN(next_year, next_month, day());
}else
{
julian_day_number -= 30;
}
return (*this);
}
Date& Gregorian::add_month(int n)
{
if(n > 0)
{
for(int i = 0; n > i; i++)
add_month();
}else
{
for(int i = 0; std::abs(n) > i; i++)
sub_month();
}
return (*this);
}
Date& Gregorian::add_month()
{
int next_year = year();
int next_month = month();
if(month() == 12)
{
next_month = 1;
next_year++;
}else
{
next_month++;
}
if(days_in_month(next_year, next_month) >= day())
{
julian_day_number = gregorian_date_to_JDN(next_year, next_month, day());
}else
{
julian_day_number += 30;
}
return (*this);
}
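//Gregorian leap-year rule: divisible by 4, except century years, which must also be
//divisible by 400 (e.g. 1900 -> false, 2000 -> true, 2012 -> true).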
bool Gregorian::is_leap_year(int year) const
{
if(year % 4 == 0)
if(year % 100 == 0)
if(year % 400 == 0)
return true;
else
return false;
else
return true;
else
return false;
}
void Gregorian::is_valid(int d, int m, int y) const
{
if(m < 1 || m > 12 || d < 1 || d > days_in_month(y,m)){
throw std::out_of_range("illegal date");
}
}
}
<file_sep>/old/gregorian.cpp
#include "gregorian.h"
namespace lab2{
//spot 0 empty to avoid having to do index arithmetic later
const std::string Gregorian::dayNames[8] =
{"INVALID_DAY",
"monday",
"tuesday",
"wednesday",
"thursday",
"friday",
"saturday",
"sunday"};
const std::string Gregorian::monthNames[13] =
{"INVALID_MONTH",
"january",
"february",
"march",
"april",
"may",
"june",
"july",
"august",
"september",
"october",
"november",
"december"};
const int Gregorian::daysInMonth[13] =
{0,31,28,31,30,31,30,31,31,30,31,30,31};
Gregorian::Gregorian(){
// För att få nuvarande tid
time_t mytime = time(NULL);
//set_k_time(mytime);
k_time(&mytime);
// För att få ut datum lokalt
struct tm *t = gmtime(&mytime);
currentYear = t->tm_year + 1900;
currentMonth = t->tm_mon + 1; // månaderna och dagarna
currentDay = t->tm_mday; // indexerade från ETT
julianDayNumber = gregorianDateToJDN(currentYear, currentMonth, currentDay);
}
Gregorian::Gregorian(const Date& d) {
if(d != *this) {
julianDayNumber = d.julianDayNumber;
jdnToGregorian(julianDayNumber);
}
}
Gregorian::Gregorian(int jdn){
julianDayNumber = jdn;
jdnToGregorian(jdn);
}
Gregorian::Gregorian(int year, int month, int day)
{
checkValid(year, month, day);
currentYear = year;
currentMonth = month; // månaderna och dagarna
currentDay = day; // indexerade från ETT
julianDayNumber = gregorianDateToJDN(year, month, day);
}
Gregorian::~Gregorian(void)
{
}
void Gregorian::checkValid(int year, int month, int day) {
if(month < 1 || month > 12 || day < 1 || day > days_this_month(year,month)){
throw std::out_of_range("illegal date given!");
}
}
/**
* Converts a conventional western Gregorian Date to a julian day number
* Algorithm source: http://www.astro.uu.nl/~strous/AA/en/reken/juliaansedag.html
**/
int Gregorian::gregorianDateToJDN(int y, int m, int d){
return (1721425.5 - 1) + (365 * (y - 1)) +
(int)((y - 1) / 4) + (-(int)((y - 1) / 100)) +
(int)((y - 1) / 400) + (int)((((367 * m) - 362) / 12) +
((m <= 2) ? 0 : (isLeapYear(y) ? -1 : -2)) + d);
}
/**
* Converts a Julian Day Number to a Gregorian date.
**/
void Gregorian::jdnToGregorian(int jd){
int dqc, dcent, dquad, leapadj;
double wjd,depoch, quadricent, cent, quad,yindex, yearday;
wjd = (int)(mod_julian_day() +2400000) + 0.5;
depoch = wjd - 1721425.5;
quadricent = floor(depoch / 146097);
dqc = (((int)depoch) % 146097);
cent = (int)(dqc / 36524);
dcent = (dqc %36524);
quad = (int)(dcent / 1461);
dquad = (dcent % 1461);
yindex = (int)(dquad / 365);
currentYear = int((quadricent * 400) + (cent * 100) + (quad * 4) + yindex);
if (!((cent == 4) || (yindex == 4))) {
(currentYear)++;
}
yearday = wjd - gregorianDateToJDN(currentYear, 1, 1);
leapadj = ((wjd < gregorianDateToJDN(currentYear, 3, 1)) ? 0
:
(isLeapYear(currentYear) ? 1 : 2));
currentMonth = int(floor((((yearday + leapadj) * 12) + 373) / 367));
currentDay = int((wjd - gregorianDateToJDN(currentYear, currentMonth, 1)) + 1);
}
const int Gregorian::week_day() const{
int m = month();
int y = year();
if (m < 3){
m += 12;
y -=1;
}
int k = y % 100;
int j = y /100;
int h = ((day() + (int)(((m+1)*26)/10) + k + (int)(k/4) + (int)(j/4) + (5*j))) % 7;
h -= 1;
if(h == 0){
h = 7;
}
if(h == -1){
h = 6;
}
return h;
}
const int Gregorian::year() const{
return currentYear;
}
const int Gregorian::month() const{
return currentMonth;
}
const int Gregorian::day() const{
return currentDay;
}
const int Gregorian::days_per_week() const{
return daysInWeek;
}
const int Gregorian::days_this_month() const {
return days_this_month(currentYear, currentMonth);
}
int Gregorian::days_this_month(int year, int month) const {
if(month == 0) month = 12;
int result = 0;
if(month == 2 && isLeapYear(year)){
result = 29;
}else{
result = daysInMonth[month];
}
return result;
}
const int Gregorian::months_per_year() const{
return monthsInYear;
}
/**
* Method for determining whether or not the given year was a leap year.
* Algorithm source: http://en.wikipedia.org/wiki/Leap_year
**/
bool Gregorian::isLeapYear(int year) const {
if(year % 400 == 0){
return true;
}else if(year % 100 == 0){
return false;
}else if(year % 4 == 0){
return true;
}else{
return false;
}
}
const std::string Gregorian::week_day_name() const{
return dayNames[week_day()];
}
const std::string Gregorian::month_name() const{
printf("\n$$$ month: %d $$$$\n", currentMonth);
fflush(stdout);
if(currentMonth > 12 || currentMonth < 1)
throw std::out_of_range("illegal date given!");
return monthNames[currentMonth];
}
//prefix
Date& Gregorian::operator++(){
julianDayNumber++;
jdnToGregorian(julianDayNumber);
return *this;
}
Date& Gregorian::operator--(){
julianDayNumber--;
jdnToGregorian(julianDayNumber);
return *this;
}
//postfix
Gregorian Gregorian::operator++(int n)
{
Gregorian old(year(), month(), day());
++(*this);
return old;
}
Gregorian Gregorian::operator--(int n)
{
Gregorian old(year(), month(), day());
--(*this);
return old;
}
int Gregorian::mod_julian_day() const{
return int(julianDayNumber - 2400000);
}
void Gregorian::operator+=(int n){
julianDayNumber+= n;
jdnToGregorian(julianDayNumber);
}
void Gregorian::operator-=(int n){
julianDayNumber-= n;
jdnToGregorian(julianDayNumber);
}
Date& Gregorian::add_year(int n){
if(currentDay > days_this_month(currentYear+n, currentMonth))
{
julianDayNumber = gregorianDateToJDN(currentYear + n, currentMonth, days_this_month(currentYear + n,currentMonth));
}else
{
julianDayNumber = gregorianDateToJDN(currentYear + n, currentMonth, currentDay);
}
jdnToGregorian(julianDayNumber);
return *this;
}
Date& Gregorian::add_month(int n) {
int tmpYear = currentYear;
int tmpMonth = currentMonth + n;
tmpYear += tmpMonth/12;
tmpMonth = tmpMonth % 12;
if(tmpMonth <= 0)
{
tmpMonth = 12 + tmpMonth;
--tmpYear;
}
if(currentDay > days_this_month(tmpYear,tmpMonth))
{
julianDayNumber = gregorianDateToJDN(tmpYear, tmpMonth, days_this_month(tmpYear,tmpMonth));
}else
{
julianDayNumber = gregorianDateToJDN(tmpYear, tmpMonth, currentDay);
}
jdnToGregorian(julianDayNumber);
return *this;
}
Gregorian& Gregorian::operator=(const Date& d) {
julianDayNumber = d.julianDayNumber;
updateDateWithJDN(julianDayNumber);
return *this;
}
void Gregorian::updateDateWithJDN(int jdn) {
jdnToGregorian(jdn);
return;
}
int Gregorian::calculateDaysNextMonth() {
int nextMonth = currentMonth+1;
int nextYear = currentYear;
if(nextMonth > 12){ //Rollover
nextYear++;
nextMonth = 1;
}
Gregorian nextDate = Gregorian(nextYear, nextMonth, 1);
int daysNextMonth = nextDate.days_this_month();
return daysNextMonth;
}
}
<file_sep>/date/makefile
ALL:
#g++ -g -O2 -static -std=gnu++0x date.cpp gregorian.cpp kattistime.cpp cprog09lab22b.cpp
g++ -g -Wall date.cpp julian.cpp gregorian.cpp kattistime.cpp test_codea.cpp
<file_sep>/cal/gregorian.h
#pragma once
#include "date.h"
#include "kattistime.h"
#include <time.h>
#include <stdlib.h>
#include <stdio.h>
#include <cstdlib>
#include <math.h>
#include <iterator>
#include <stdexcept>
#include <iostream>
namespace lab2
{
struct day_month_year
{
int day;
int month;
int year;
};
class Gregorian :
public Date
{
public:
Gregorian();
Gregorian(const Date&);
Gregorian(int jdn);
Gregorian(int year, int month, int day);
~Gregorian();
virtual int mod_julian_day() const;
virtual double julian_day() const;
const virtual int year() const;
const virtual int month() const;
const virtual int day() const;
const virtual int week_day() const;
const virtual int days_per_week() const;
const virtual int days_this_month() const;
const virtual int months_per_year() const;
const virtual std::string week_day_name() const;
const virtual std::string month_name() const;
//prefix
virtual Date& operator++();
virtual Date& operator--();
//postfix
virtual Gregorian operator++(int n);
virtual Gregorian operator--(int n);
virtual void operator+=(int n);
virtual void operator-=(int n);
virtual Gregorian& operator=(const Date& d);
virtual Date& add_year(int n);
Date& add_year();
Date& add_month(int n);
virtual Date& add_month();
virtual Date& sub_month();
protected:
virtual bool is_leap_year(int year) const;
bool is_leap_day() const;
virtual int days_in_month(int year, int month) const;
//int calculateDaysNextMonth();
//void jdnToG_Regorian(int jdn);
double gregorian_date_to_JDN(int year, int month, int day) const;
double JDN_to_mod_julian_day(double jdn) const;
static const std::string day_names[8];
static const std::string month_names[13];
static const int days_in_month_array[13];
static const int days_in_week = 7;
static const int months_in_year = 12;
void is_valid(int d, int m, int y) const;
day_month_year JDN_to_gregorian(double jd) const;
};
}
<file_sep>/old/calendar.h
#pragma once
#include <string>
#include <vector>
#include <algorithm>
#include "date.h"
#include "gregorian.h"
#include "Event.h"
namespace lab2 {
using namespace std;
template <class T> class Calendar
{
protected:
template<class U> friend std::ostream& operator<<(std::ostream&,const Calendar<U>&);
public:
T* currentDate;
vector<Event*>* events;
Calendar(void) {
currentDate = new T();
events = new vector<Event*>();
}
template <class S>
Calendar(const Calendar<S> & other) {
if(static_cast<void*>(this) == &other) {
return;
}
currentDate = new T();
*currentDate -= currentDate->mod_julian_day();
*currentDate += other.currentDate->mod_julian_day();
typename vector<Event* >::iterator it;
it = other.events->begin();
events->clear();
for(;it!=other.events->end();it++) {
T* tmpDate = new T((*it)->getDate());
Event eventCopy((*it)->getEventName(), tmpDate);
events->push_back(&eventCopy);
}
}
~Calendar(void) {
}
bool set_date(int year, int month, int day) {
Date* current = currentDate;
try {
currentDate = new T(year, month, day);
delete(current);
}catch (...) {
return false;
}
return true;
}
bool add_event(string eventName) {
return add_event(eventName, new T());
}
bool add_event(string eventName, int day) {
return add_event(eventName, T().month(), day);
}
bool add_event(string eventName, int month, int day) {
return add_event(eventName, T().year(), month, day);
}
bool add_event(string eventName, int year, int month, int day) {
T* d;
try {
d = new T(year, month, day);
}catch(...) {
return false;
}
return add_event(eventName, d);
}
bool remove_event(string name) {
T* d = new T();
return remove_event(name, d);
}
bool remove_event(string name, int day) {
return remove_event(name, T().month(), day);
}
bool remove_event(string name, int month, int day) {
return remove_event(name, T().year(), month, day);
}
bool remove_event(string name, int year, int month, int day) {
T* d;
try{
d = new T(year, month, day);
}catch (...) {
return false;
}
return true;
}
//Assignment operator.
Calendar<T>& operator=(const Calendar& other) {
delete(currentDate); //Clean
this->currentDate = other.currentDate; //Assign
delete[] events; //Clean
this->events = other.events; //Assign
//TODO: copy more values
return *this;
}
//Private, does the heavy lifting once the public ones have checked for valid input and standardized
//their parameters (made a date).
bool add_event(string eventName, Date* d) {
if(indexOf(eventName, *d) > 0) //Exists.
return false;
events->push_back(new Event(eventName, d));
return true;
}
//Private, actually does the removing after doing an exists-check.
bool remove_event(string eventName, Date* d) {
int i = indexOf(eventName, *d);
if(i < 0) //Doesn't exist
return false;
events->erase(events->begin() + i);
return true;
}
//Returns the index of the given event name+date combo, or negative number if not found.
int indexOf(string name, Date& d) {
for(int i = 0; i < events->size(); ++i) {
Event* e = (*events)[i];
if(e->getEventName() == name && e->getDate() == d)
return i;
}
return -1;
}
};
template <class U>
ostream& operator<<(ostream& o, const Calendar<U>& cal){
sort(cal.events->begin(), cal.events->end());
U* start = new U();
vector<Event*>::iterator it;
//First, skip ahead to today
for(it = cal.events->begin(); it != cal.events->end(); ++it){
Date* current = &(*it)->getDate();
if(current->julianDayNumber == start->julianDayNumber) //Todays date, this is where we start.
break; //++it will take care of getting us to start+1, so we're done here
}
for(; it != cal.events->end(); ++it) {
o << *it;
}
delete(start);
return o;
}
}<file_sep>/old/test_code.h
#pragma once
namespace lab2{
class test_code
{
public:
test_code(void);
~test_code(void);
};
}
/*
int main();
}
*/<file_sep>/cal/julian.h
#pragma once
#include "gregorian.h"
#include "kattistime.h"
#include <time.h>
#include <stdlib.h>
#include <stdio.h>
#include <cstdlib>
#include <math.h>
#include <iterator>
#include <stdexcept>
#include <iostream>
namespace lab2
{
class Julian :
public lab2::Gregorian
{
public:
Julian();
Julian(const Date&);
Julian(int jdn);
Julian(int year, int month, int day);
~Julian();
const int day() const;
const int month() const;
const int year() const;
virtual Date& add_year(int n);
//virtual Date& add_month(int n);
virtual Date& add_month();
virtual Date& sub_month();
protected:
virtual bool is_leap_year(int year) const;
//int calculateDaysNextMonth();
double julian_date_to_JDN(int year, int month, int day) const;
day_month_year JDN_to_julian(double jd) const;
const virtual int week_day() const;
};
}
<file_sep>/old/Event.cpp
#include "Event.h"
#include "date.h"
namespace lab2 {
Event::Event(string name, Date* date)
{
this->name = name;
this->date = date;
}
string Event::getEventName() {
return name;
}
Date& Event::getDate() {
return *date;
}
ostream& Event::operator<<(ostream& o) {
o << date << " : " << name << std::endl;
return o;
}
Event::~Event(void)
{
}
}
<file_sep>/old/gregorian.h
#pragma once
#include "date.h"
#include "kattistime.h"
#include <time.h>
#include <stdlib.h>
#include <stdio.h>
#include <cstdlib>
#include <math.h>
#include <iterator>
#include <stdexcept>
namespace lab2{
class Gregorian :
public Date
{
public:
int currentMonth;
int currentYear;
int currentDay;
Gregorian();
Gregorian(const Date&);
Gregorian(int jdn);
Gregorian(int year, int month, int day);
~Gregorian();
const virtual int year() const;
const virtual int month() const;
const virtual int day() const;
const virtual int week_day() const;
const virtual int days_per_week() const;
const virtual int days_this_month() const;
const virtual int months_per_year() const;
const virtual std::string week_day_name() const;
const virtual std::string month_name() const;
//prefix
virtual Date& operator++();
virtual Date& operator--();
//postfix
virtual Gregorian operator++(int n);
virtual Gregorian operator--(int n);
virtual void operator+=(int n);
virtual void operator-=(int n);
virtual Gregorian& operator=(const Date& d);
void updateDateWithJDN(int jdn);
virtual Date& add_year(int n = 1);
virtual Date& add_month(int n = 1);
virtual int mod_julian_day() const;
protected:
virtual bool isLeapYear(int year) const;
int days_this_month(int year, int month) const;
int calculateDaysNextMonth();
void jdnToGregorian(int jdn);
int gregorianDateToJDN(int year, int month, int day);
static const int daysInWeek = 7;
static const int monthsInYear = 12;
static const std::string dayNames[8];
static const std::string monthNames[13];
static const int daysInMonth[13];
void checkValid(int, int, int);
};
}
<file_sep>/old/Rebecca.cpp
#include "Rebecca.h"
namespace lab2{
Rebecca::Rebecca(void)
{
}
const int Rebecca::day() const{
return 5;
}
Rebecca::~Rebecca(void)
{
}
}<file_sep>/old/test_code.cpp
#include "test_code.h"
#include "julian.h"
using namespace lab2;
test_code::test_code(void)
{
}
test_code::~test_code(void)
{
}
/*
int main(){
Gregorian a = Gregorian(2009, 2, 24);
std::cout << "Created Gregorian date 2009-02-24 (Tuesday). Our implementation reports \n";
std::cout << "Greg: " << a.year() << "-" << a.month() << "-" << a.currentDay << ": " << a.week_day_name() << "(" << a.julianDayNumber << ")" << std::endl;
std::cout << "Mod_Julian: " << a.mod_julian_day() << std::endl;
a.add_month(15);
std::cout << "Adding 15 months gives us the date 2010-05-24 (Monday)\n";
std::cout << "Greg: " << a.year() << "-" << a.month() << "-" << a.currentDay << ": " << a.week_day_name() << "(" << a.julianDayNumber << ")" << std::endl;
std::cout << "\n";
std::cout << "Created Gregorian date 2037-07-12 (Sunday). Our implementation reports \n";
Gregorian c = Gregorian(2037, 7, 12);
std::cout << "Greg: " << c.year() << "-" << c.month() << "-" << c.currentDay << ": " << c.week_day_name() << "(" << c.julianDayNumber << ")" << std::endl;
std::cout << "Mod_Julian: " << c.mod_julian_day() << std::endl;
c.add_month(15);
std::cout << "Adding 15 months gives us the date 2038-10-12 (Tuesday)\n";
std::cout << "Greg: " << c.year() << "-" << c.month() << "-" << c.currentDay << ": " << c.week_day_name() << "(" << c.julianDayNumber << ")" << std::endl;
std::cout << "\n";
std::cout << "Created Julian date from Greg 1970-01-15 (Thursday). \n" <<
"This should yield Julian 1970-01-02. Our implementation reports \n";
Julian b = Julian(1970, 1, 15);
std::cout << "Julian: " << b.year() << "-" << b.month() << "-" << b.day() << ": " << b.week_day_name() << "(" << b.julianDayNumber << ")" << std::endl;
std::cout << "Mod_Julian: " << b.mod_julian_day() << std::endl;
b.add_month(15);
std::cout << "Adding 15 months should yield Julian 1971-04-02 (Thursday)\n";
std::cout << "Julian: " << b.year() << "-" << b.month() << "-" << b.currentDay << ": " << b.week_day_name() << "(" << b.julianDayNumber << ")" << std::endl;
std::cout << "\n";
std::cout << "Created Julian date from Greg 1997-04-22 (Tuesday). \n" <<
"This should yield Julian 1997-04-09\n";
Julian d = Julian(1997, 4, 22);
std::cout << "Julian: " << d.year() << "-" << d.month() << "-" << d.day() << ": " << d.week_day_name() << "(" << d.julianDayNumber << ")" << std::endl;
std::cout << "Mod_Julian: " << d.mod_julian_day() << std::endl;
d.add_month(15);
std::cout << "Adding 15 months should yield Julian 1998-07-09 (Wednesday)\n";
std::cout << "Julian: " << d.year() << "-" << d.month() << "-" << d.currentDay << ": " << d.week_day_name() << "(" << d.julianDayNumber << ")" << std::endl;
std::cout << "\n";
//Gregorian c = Gregorian(2010, 10, 1);
//Gregorian d = Gregorian(2010, 9, 31);
//std::cout << "Greg: " << c.year() << "-" << c.month() << "-" << c.currentDay << ": " << c.week_day_name() << "(" << c.julianDayNumber << ")" << std::endl;
//std::cout << "Greg: " << d.year() << "-" << d.month() << "-" << d.currentDay << ": " << d.week_day_name() << "(" << d.julianDayNumber << ")" << std::endl;
std::string asdf;
std::cin >> asdf;
return 0;
}
*/<file_sep>/old/makefile
cal.exe:
g++ -Wall -g -o cal.exe date.cpp gregorian.cpp julian.cpp datetest.cpp kattistime.cpp
<file_sep>/old/Event.h
#pragma once
#include <string>
#include "date.h"
namespace lab2 {
using namespace std;
class Event
{
public:
Event(string, Date* date);
~Event(void);
ostream& operator<<(ostream&);
string getEventName();
Date& getDate();
private:
string name;
Date* date;
};
}
<file_sep>/date/test_codea.cpp
#include "test_code.h"
#include "julian.h"
//#include "Julian.h"
using namespace lab2;
test_code::test_code(void)
{
}
test_code::~test_code(void)
{
}
int main(){
time_t time = 0;
set_k_time(time);
Julian j = Julian(1923, 1, 1);
std::cout << "--------------------------------------------------------------------------------\n";
std::cout << "Test 1 - Date: 1923-01-01: \n";
std::cout << "\n";
std::cout << "Gregor - Date: " << j.year() << "-" << j.month() << "-" << j.day() << ": " << j.week_day_name() << std::endl;
std::cout << "Actual JDN: " << j.mod_julian_day() << std::endl;
std::cout << "Expected JDN: " << "23433" << std::endl;
std::cout << "\n";
j = Julian(2014, 5, 22);
std::cout << "--------------------------------------------------------------------------------\n";
std::cout << "Test 1 - Date: 2014-05-22: \n";
std::cout << "\n";
std::cout << "Gregor - Date: " << j.year() << "-" << j.month() << "-" << j.day() << ": " << j.week_day_name() << std::endl;
std::cout << "Actual JDN: " << (int)j.julian_day() << std::endl;
std::cout << "Expected JDN: " << "56812" << std::endl;
std::cout << "\n";
/*
std::cout << "Julian - Date: " << j.year() << "-" << j.month() << "-" << j.day() << ": " << j.week_day_name() << std::endl;
std::cout << "Actual JDN: " << j.mod_julian_day() << std::endl;
std::cout << "Expected JDN: " << "54899" << std::endl;
std::cout << "\n\n";
g = Julian(1970, 1, 17);
// j = Julian(1970, 1, 17);
std::cout << "--------------------------------------------------------------------------------\n";
std::cout << "Test 2 - Date: 1970-1-17: Saturday\n";
std::cout << "\n";
std::cout << "Gregor - Date: " << g.year() << "-" << g.month() << "-" << g.day() << ": " << g.week_day_name() << std::endl;
std::cout << "Actual JDN: " << g.mod_julian_day() << std::endl;
std::cout << "Expected JDN: " << "40603" << std::endl;
std::cout << "\n";
//std::cout << "Julian - Date: " << j.year() << "-" << j.month() << "-" << j.day() << ": " << j.week_day_name() << std::endl;
std::cout << "Actual JDN: " << j.mod_julian_day() << std::endl;
std::cout << "Expected JDN: " << "40616" << std::endl;
std::cout << "\n\n";
g = Julian();
// j = Julian();
std::cout << "--------------------------------------------------------------------------------\n";
std::cout << "Test 3 - Date: Today! all dates should be the same!\n";
std::cout << "\n";
std::cout << "Gregor - Date: " << g.year() << "-" << g.month() << "-" << g.day() << ": " << g.week_day_name() << std::endl;
std::cout << " JDN: " << g.mod_julian_day() << std::endl;
std::cout << "\n";
std::cout << "Julian - Date: " << j.year() << "-" << j.month() << "-" << j.day() << ": " << j.week_day_name() << std::endl;
std::cout << " JDN: " << j.mod_julian_day() << std::endl;
std::cout << "\n\n";
std::cout << "--------------------------------------------------------------------------------\n";
std::cout << "--------------------------------------------------------------------------------\n";
std::cout << "--------------------------------------------------------------------------------\n";
std::cout << "--------------------------------------------------------------------------------\n";
g = Julian(1923, 3, 1);
//g.add_year(111);
//g.add_month(-120);
//g.add_month(-119);
g.add_year(-10);
// j = Julian(1900, , 29);
std::cout << "--------------------------------------------------------------------------------\n";
std::cout << "Test 4 - Date: 1923-3-1: Thursday\n";
std::cout << "\n";
std::cout << "Gregor - Date: " << g << ": " << g.week_day_name() << std::endl;
std::cout << "Actual JDN: " << g.mod_julian_day() << std::endl;
std::cout << "Expected JDN: " << "23479" << std::endl;
std::cout << "\n";
g = Julian(1858, 1, 1);
// j = Julian(1900, 2, 29);
std::cout << "--------------------------------------------------------------------------------\n";
std::cout << "Test 5 - Date: 1858-01-01\n";
std::cout << "\n";
std::cout << "Gregor - Date: " << g << ": " << g.week_day_name() << std::endl;
std::cout << "Actual JDN: " << g.mod_julian_day() << std::endl;
std::cout << "Expected JDN: " << "-1" << std::endl;
std::cout << "\n";
g = Julian(2033, 8, 20);
// j = Julian(1900, 2, 29);
std::cout << "--------------------------------------------------------------------------------\n";
std::cout << "Test 6 - Date: 2033-08-20\n";
std::cout << "\n";
std::cout << "Gregor - Date: " << g << ": " << g.week_day_name() << std::endl;
std::cout << "Actual JDN: " << g.mod_julian_day() << std::endl;
std::cout << "Expected JDN: " << "-1" << std::endl;
std::cout << "\n";
set_k_time(717119999);
g = Julian();
// j = Julian(1900, 2, 29);
std::cout << "--------------------------------------------------------------------------------\n";
std::cout << "Test 7 - Date: 1992-09-21\n";
std::cout << "\n";
std::cout << "Gregor - Date: " << g << ": " << g.week_day_name() << std::endl;
std::cout << "Actual JDN: " << g.mod_julian_day() << std::endl;
std::cout << "Expected JDN: " << "-1" << std::endl;
std::cout << "\n";
set_k_time(717120000);// + 86400/4);
g = Julian();
// j = Julian(1900, 2, 29);
std::cout << "--------------------------------------------------------------------------------\n";
std::cout << "Test 8 - Date: 1992-09-22\n";
std::cout << "\n";
std::cout << "Gregor - Date: " << g << ": " << g.week_day_name() << std::endl;
std::cout << "Actual JDN: " << g.mod_julian_day() << std::endl;
std::cout << "\n";
g = Julian(1992,9,22);
// j = Julian(1900, 2, 29);
std::cout << "--------------------------------------------------------------------------------\n";
std::cout << "Test 9 - Date: 1992-09-22\n";
std::cout << "\n";
std::cout << "Gregor - Date: " << g << ": " << g.week_day_name() << std::endl;
std::cout << "Actual JDN: " << g.mod_julian_day() << std::endl;
std::cout << "\n";
set_k_time(212158799);// + 86400/4);
g = Julian();
// j = Julian(1900, 2, 29);
std::cout << "--------------------------------------------------------------------------------\n";
std::cout << "Test 10 - Date: 1976-09-21\n";
std::cout << "\n";
std::cout << "Gregor - Date: " << g << ": " << g.week_day_name() << std::endl;
std::cout << "Actual JDN: " << g.mod_julian_day() << std::endl;
std::cout << "\n";
set_k_time(0);// + 86400/4);
g = Julian();
// j = Julian(1900, 2, 29);
std::cout << "--------------------------------------------------------------------------------\n";
std::cout << "Test 11 - Date: 1970-01-01\n";
std::cout << "\n";
std::cout << "Gregor - Date: " << g << ": " << g.week_day_name() << std::endl;
std::cout << "Actual JDN: " << g.mod_julian_day() << std::endl;
std::cout << "\n";
set_k_time(2008152549);// + 86400/4);
g = Julian();
std::cout << "--------------------------------------------------------------------------------\n";
std::cout << "Test 12 - Date: 2033-08-20\n";
std::cout << "\n";
std::cout << "Gregor - Date: " << g << ": " << g.week_day_name() << std::endl;
std::cout << "Actual JDN: " << g.mod_julian_day() << std::endl;
std::cout << "\n";
g = Julian(2093,1,1);
std::cout << "--------------------------------------------------------------------------------\n";
std::cout << "Test 13 - Date: 2093-01-01\n";
std::cout << "\n";
std::cout << "Gregor - Date: " << g << ": " << g.week_day_name() << std::endl;
std::cout << "Actual JDN: " << g.mod_julian_day() << std::endl;
std::cout << "\n";
*/
return 0;
}
<file_sep>/cal/makefile
ALL:
g++ -g -Wall calendar.cpp date.cpp event.cpp gregorian.cpp julian.cpp cprog09lab23.cpp
<file_sep>/old/test_code_new.cpp
#include "test_code_new.h"
#include "julian.h"
using namespace lab2;
test_code::test_code(void)
{
}
test_code::~test_code(void)
{
}
int main(){
Gregorian g = Gregorian(2009, 2, 24);
Julian j = Julian(2009, 2, 24);
std::cout << "--------------------------------------------------------------------------------\n";
std::cout << "Test 1 - Date: 2009-2-24: Tuesday\n";
std::cout << "\n";
std::cout << "Gregor - Date: " << g.year() << "-" << g.month() << "-" << g.currentDay << ": " << g.week_day_name() << std::endl;
std::cout << "Actual JDN: " << g.mod_julian_day() << std::endl;
std::cout << "Expected JDN: " << "54886" << std::endl;
std::cout << "\n";
std::cout << "Julian - Date: " << j.year() << "-" << j.month() << "-" << j.currentDay << ": " << j.week_day_name() << std::endl;
std::cout << "Actual JDN: " << j.mod_julian_day() << std::endl;
std::cout << "Expected JDN: " << "54899" << std::endl;
std::cout << "\n\n";
g = Gregorian(1970, 1, 17);
j = Julian(1970, 1, 17);
std::cout << "--------------------------------------------------------------------------------\n";
std::cout << "Test 2 - Date: 1970-1-17: Saturday\n";
std::cout << "\n";
std::cout << "Gregor - Date: " << g.year() << "-" << g.month() << "-" << g.currentDay << ": " << g.week_day_name() << std::endl;
std::cout << "Actual JDN: " << g.mod_julian_day() << std::endl;
std::cout << "Expected JDN: " << "40603" << std::endl;
std::cout << "\n";
std::cout << "Julian - Date: " << j.year() << "-" << j.month() << "-" << j.currentDay << ": " << j.week_day_name() << std::endl;
std::cout << "Actual JDN: " << j.mod_julian_day() << std::endl;
std::cout << "Expected JDN: " << "40616" << std::endl;
std::cout << "\n\n";
g = Gregorian();
j = Julian();
std::cout << "--------------------------------------------------------------------------------\n";
std::cout << "Test 3 - Date: Today! all dates should be the same!\n";
std::cout << "\n";
std::cout << "Gregor - Date: " << g.year() << "-" << g.month() << "-" << g.currentDay << ": " << g.week_day_name() << std::endl;
std::cout << " JDN: " << g.mod_julian_day() << std::endl;
std::cout << "\n";
std::cout << "Julian - Date: " << j.year() << "-" << j.month() << "-" << j.currentDay << ": " << j.week_day_name() << std::endl;
std::cout << " JDN: " << j.mod_julian_day() << std::endl;
std::cout << "\n\n";
g = Gregorian(1900, 2, 29);
j = Julian(1900, 2, 29);
std::cout << "--------------------------------------------------------------------------------\n";
std::cout << "Test 4 - Date: 1900-2-29: Thursday\n";
std::cout << "\n";
std::cout << "Gregor - Date: " << g.year() << "-" << g.month() << "-" << g.currentDay << ": " << g.week_day_name() << std::endl;
std::cout << "Actual JDN: " << g.mod_julian_day() << std::endl;
std::cout << "Expected JDN: " << "15079" << std::endl;
std::cout << "\n";
std::string asdf;
std::cin >> asdf;
return 0;
}
<file_sep>/old/Rebecca.h
#pragma once
#include "gregorian.h"
namespace lab2 {
class Rebecca :
public Gregorian
{
public:
Rebecca(void);
~Rebecca(void);
const virtual int day() const;
};
}
| beb18573a713fb9c07e44ae5ff0993693f27179b | [
"Makefile",
"C++"
] | 22 | C++ | LukasGlader/Calendar | e2830399a79e331fde188716040cf716260ad174 | 8e881267644bb78ae650703166953aa25f34f9b0 | |
refs/heads/master | <file_sep>package com.example.todo;
public class Tabla {
public static String TABLA_TODO="todo";
public static String CAMPO_NAME="nombre";
public static String CAMPO_DATE="fecha";
public static String CAMPO_COMPLETE="completado";
public static final String CREAR_TABLA_TODO = "CREATE TABLE "+TABLA_TODO+" ("+CAMPO_NAME+" TEXT, "+CAMPO_DATE+" TEXT, "+CAMPO_COMPLETE+" INTEGER)";
}
<file_sep>package com.example.todo;
import java.io.Serializable;
public class Todo implements Serializable {
private String name;
private String date;
private Integer complete;
public Todo(String name, String date, Integer complete) {
this.name = name;
this.date = date;
this.complete = complete;
}
public Todo(){
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getDate() {
return date;
}
public void setDate(String date) {
this.date = date;
}
public Integer getComplete() {
return complete;
}
public void setComplete(Integer complete) {
this.complete = complete;
}
}
| 4d0e6bb11062c04292c793a82850e8bfe6d6a59f | [
"Java"
] | 2 | Java | Nefarious94/Examen2 | ca0223e016f458cdf46fba49a9f95f2294a67088 | af81ebfb0147251b22b5669d339c6b4d20a8b5a9 | |
refs/heads/master | <repo_name>DimuthuMadushan/EpiDataFuseUI<file_sep>/src/components/createPipeline.js
import React from 'react';
import { Checkbox } from '@material-ui/core';
import Radio from '@material-ui/core/Radio';
import RadioGroup from '@material-ui/core/RadioGroup';
import FormControlLabel from '@material-ui/core/FormControlLabel';
import {Route, BrowserRouter as Router,useParams, useRouteMatch,matchPath , Switch, Link} from 'react-router-dom';
import AddIcon from "@material-ui/icons/Add";
import { Fab, Button } from "@material-ui/core";
import FormControl from '@material-ui/core/FormControl';
import FormLabel from '@material-ui/core/FormLabel';
class CreatePipeline extends React.Component {
state={
pipelines:[{pipelineName:"", status:""}],
display:"none",
UI:"none",
form:"none"
}
createPipeline=()=>{
this.setState((prevState) => ({
pipelines: [...prevState.pipelines,{pipelineName:"", status:""} ]
}),()=>{
let {path, url} = matchPath
console.log(path)
console.log(url)
});
}
handleChange = (e) =>{
let pipelines = [...this.state.pipelines]
var id = pipelines.length
console.log(id)
pipelines[id-1][e.target.id] = e.target.value
pipelines[id-1]["status"] = "idle"
this.setState({pipelines},()=>{
console.log(pipelines)
})
}
handleCreate = () =>{
let display = this.state.display
if(display==="none"){
display ="block"
this.setState({display})
} else {
display ="none"
this.setState({display})
}
}
select=(e)=>{
let UI,form = this.state.display
if(e.target.value=="0"){
UI = "block"
form = "none"
this.setState({UI,form})
}else if(e.target.value=="1"){
form = "block"
UI = "none"
this.setState({UI,form})
}
}
handleSubmit = (e) => {
e.preventDefault()
}
render(){
let pipelines = this.state.pipelines
return(
<div>
<div >
<button className="w3-btn w3-white w3-border w3-border-green w3-round-large" onClick={this.handleCreate} >Create Pipeline</button>
<button className="w3-btn w3-white w3-border w3-border-green w3-round-large w3-right" >Refresh</button>
<h6>
<div className="w3-panel w3-border w3-half" style={{display:this.state.display}}>
<label className="col-25">Using</label>
<br/>
<div>
<RadioGroup aria-label="select" name="select1" value={this.value} onChange={this.select}>
<FormControlLabel value="0" control={<Radio />} label="UI" />
<FormControlLabel value="1" control={<Radio />} label="Config File"/>
</RadioGroup>
</div>
<div style={{display:this.state.UI}}>
<label className="col-25">Pipeline Name</label>
<input id="pipelineName" onChange={this.handleChange} className="col-75"/>
<button className="w3-btn w3-white w3-border w3-border-red w3-round-large" >Cancel</button>
<button className="w3-btn w3-white w3-border w3-border-green w3-round-large" onClick={this.createPipeline} >Create</button>
</div>
<div style={{display:this.state.form}}>
<label className="col-25">Config file</label>
<label htmlFor="upload-photo">
<input
style={{ display: "block" }}
id="upload-file"
name="upload-file"
type="file"
/>
<button className="w3-btn w3-white w3-border w3-border-red w3-round-large" >Cancel</button>
<button className="w3-btn w3-white w3-border w3-border-green w3-round-large" onClick={this.createPipeline}>Create</button>
</label>
</div>
</div>
</h6>
</div>
<h6>
<Router>
<table className="w3-table-all w3-col-50">
<tr>
<th>Pipeline</th>
<th>Status</th>
<th>Custom</th>
</tr>
{pipelines.map((val,idx)=>{
return(
<tr key={idx}>
<td><Link to="/addFeature" >{val.pipelineName}</Link></td>
<td>{val.status}</td>
<td></td>
</tr>
)
})}
</table>
</Router>
</h6>
</div>
)
}
}
export default CreatePipeline;
<file_sep>/src/firebase/firebase.js
import firebase from 'firebase';
const firebaseConfig = {
apiKey: "<KEY>",
authDomain: "epidatafuse.firebaseapp.com",
databaseURL: "https://epidatafuse.firebaseio.com",
projectId: "epidatafuse",
storageBucket: "epidatafuse.appspot.com",
messagingSenderId: "697726385863",
appId: "1:697726385863:web:419e9fc84e35b73fe848e2",
measurementId: "G-4XHGTM838D"
};
const fire = firebase.initializeApp(firebaseConfig);
export default fire;
<file_sep>/src/components/pipeline.js
import React from 'react';
import axios from 'axios';
import {Route,Redirect, BrowserRouter as Router, Switch, Link} from 'react-router-dom';
import GranularityConfig from './granuralityConfig'
import GranularityMappingConfig from './granularityMappingConfig'
import SchemaConfig from './schemaConfig'
import Query from './query';
import SourceConnector from './sourceConnector';
import IngestConfig from './ingestConfig';
import Start from './start';
import { Divider } from '@material-ui/core';
class Pipeline extends React.Component {
handleSubmit = (e) => {
e.preventDefault()
}
render() {
return(
<div>
<Router>
<Link to="/addGranular"><button className="w3-btn w3-white w3-border w3-border-green w3-round-large" >Add New Granularity</button></Link>
<Link to="/addFeature"><button className="w3-btn w3-white w3-border w3-border-green w3-round-large">Add New Feature</button></Link>
<Link to="/addSource"><button className="w3-btn w3-white w3-border w3-border-green w3-round-large">Add New Sources</button></Link>
<Link to="/addGranConfig"> <button className="w3-btn w3-white w3-border w3-border-green w3-round-large">Add Granularity Config</button></Link>
<Link to="/addAggreConfig"><button className="w3-btn w3-white w3-border w3-border-green w3-round-large">Add Granularity Mapping Config</button></Link>
<Switch>
<Route exact path="/addGranular"><SchemaConfig/></Route>
<Route exact path="/addFeature"><IngestConfig/></Route>
<Route exact path="/addSource"><SourceConnector/></Route>
<Route exact path="/addGranConfig"><GranularityConfig/></Route>
<Route exact path="/addAggreConfig"><GranularityMappingConfig/></Route>
</Switch>
<br/><br/>
<table className="w3-table-all w3-col-50">
<tr>
<th>Feature</th>
<th>Spatial</th>
<th>Temporal</th>
<th>Target Spatial</th>
<th>Target Temporal</th>
<th>Data Source</th>
</tr>
<tr >
<td>val 1</td>
<td>val 1</td>
<td>val 1</td>
<td>val 1</td>
<td>val 1</td>
<td>val 1</td>
</tr>
</table>
</Router>
</div>
)
}
}
export default Pipeline;<file_sep>/src/App.js
import React,{useRef, useState} from 'react';
import './App.css';
import './w3.css';
import './Mt.css';
import Home from './components/home'
import SignIn from './components/signin'
import SignUp from './components/signup'
import {Route,Redirect, BrowserRouter as Router, Switch, Link} from 'react-router-dom';
import CustomRouter from './components/router';
import fire from './firebase/firebase';
class App extends React.Component {
constructor(props){
super(props);
this.state = {
user: null,
}
this.authListenr = this.authListener.bind(this);
}
componentDidMount(){
console.log(this.state.user)
this.authListener();
}
authListener(){
fire.auth().onAuthStateChanged((user)=>{
if(user){
this.setState({ user });
} else {
this.setState({user:null});
}
})
}
handleSubmit = (e) => {
e.preventDefault()
}
render(){
return (
<div>
<Switch>
<Route exact path="/">
{this.state.user ? (<Home/>) : (<SignIn/>)}
</Route>
<CustomRouter exact path="/signup" component={SignUp} {...this.state}/>
</Switch>
</div>
);
}
}
export default App;
| 0e410646a45e769e16a2f9a4b3c419d59d9a3126 | [
"JavaScript"
] | 4 | JavaScript | DimuthuMadushan/EpiDataFuseUI | 9fd8729f007cd772b39ab1432b8eb3f5172c4c32 | d7136312377339865d5c1d08d02e81fcd92e5490 | |
refs/heads/main | <repo_name>junyao-d/jr-project-one-react<file_sep>/src/components/Header/components/Navbar/components/Item/Item.jsx
import React from "react";
import "./Item.css";
const Item = ({ href, name }) => (
<a className="item" href={href}>
{name}
</a>
);
export default Item;
<file_sep>/src/components/Header/components/Logo/Logo.jsx
import React from "react";
import "./Logo.css";
const Logo = () => (
<div>
<span className="logo">Logo Components</span>
</div>
);
export default Logo;
<file_sep>/src/components/Header/Header.jsx
import React from "react";
import "./Header.css";
import Logo from "./components/Logo";
import NavBar from "./components/Navbar";
const Header = () => (
<div className="header">
Header Component
<div className="header__logo">
<Logo />
</div>
<div className="header__navbar">
<NavBar />
</div>
</div>
);
export default Header;
<file_sep>/src/components/Header/components/Navbar/Navbar.jsx
import React from "react";
import "./Navbar.css";
const Navbar = () => (
<div className="navbar">
<div className="navbar__item">Item1</div>
<div className="navbar__item">Item2</div>
</div>
);
export default Navbar;
| 6729c2fa4f7373bf7c268ce61d3e4b3023f6fa69 | [
"JavaScript"
] | 4 | JavaScript | junyao-d/jr-project-one-react | 1a233383a270671ca4fe94fafafab9a8f3dc85ef | 53205378f93cd424f617103193468f60d90eb4d5 | |
refs/heads/master | <repo_name>popshia/PokemonBST<file_sep>/PokemonBST/main.cpp
// 10627130 林冠良 & 10627131 李峻瑋 // CodeBlocks 17.12
#include <iostream>
#include <sstream>
#include <fstream>
#include <string>
#include <vector>
#include <string.h>
#include <stdio.h>
#include <stdlib.h>
#include <cstdlib>
#include <iomanip>
#include <algorithm>
#include <time.h>
using namespace std ;
typedef struct DataStruct {
string number = "0" ;
string name = "\0" ;
string type1 = "\0" ;
int total = 0 ;
int HP = 0 ;
int ATK = 0 ;
int DEF = 0 ;
string whole = "\0" ;
DataStruct* parent = NULL ;
DataStruct* leftChild = NULL ;
DataStruct* rightChild = NULL ;
DataStruct* same = NULL ;
bool visit = false ;
} DataStruct ;
static ifstream input ;
static ofstream output ;
static string FileN = "0" ;
static int Count = 1 ;
class CollegeHighGoGoGo {
int visit = 0 ;
DataStruct* root = NULL ;
vector<DataStruct*> qualified ;
public:
DataStruct* GetRoot() {
return root ;
} // Get root
int GetHeight( DataStruct* depth ) {
if ( depth == NULL ) return 0 ;
else {
int leftDepth = GetHeight( depth->leftChild ) ;
int rightDepth = GetHeight( depth->rightChild ) ;
if ( leftDepth > rightDepth ) return leftDepth+1 ;
else return rightDepth+1 ;
} //else
} // Get tree height
void Insert( DataStruct* tempData ) {
bool NotYet = true ;
DataStruct* sameWalker = NULL ;
DataStruct* treeWalker = NULL ;
if ( root == NULL ) {
root = tempData ;
tempData->parent = root ;
tempData = NULL ;
// cout << root->HP << endl ;
}
else {
treeWalker = root ;
do {
// cout << treeWalker->HP << "\t" << tempData->HP << endl ;
if ( treeWalker->HP == tempData->HP ) {
sameWalker = treeWalker ;
while ( sameWalker->same != NULL )
sameWalker = sameWalker->same ;
tempData->parent = sameWalker ;
sameWalker->same = tempData ;
NotYet = false ;
// cout << "same" << endl;
} // if
else if ( treeWalker->HP < tempData->HP ) {
if ( treeWalker->rightChild == NULL ) {
tempData->parent = treeWalker ;
treeWalker->rightChild = tempData ;
NotYet = false ;
// cout << "larger" << endl;
} // if
else treeWalker = treeWalker->rightChild ;
} // if
else if ( treeWalker->HP > tempData->HP ) {
if ( treeWalker->leftChild == NULL ) {
tempData->parent = treeWalker ;
treeWalker->leftChild = tempData ;
NotYet = false ;
// cout << "smaller" << endl;
} // if
else treeWalker = treeWalker->leftChild ;
} // if
} while ( NotYet ) ;
} // else
} // Insert by HP, print out the data
void Analyze() {
root = NULL ;
DataStruct* tempData = NULL ;
string value = "\0" ;
getline( input, value ) ; // read the labels in the first line
tempData = new DataStruct ;
cout << "\t#\tName\t\t\t\tType 1\t\tHP\tAttack\tDefense" << endl ;
while ( getline( input, value ) ) {
tempData->whole = value ;
// cout << tempData->whole << endl ;
vector<string> cut ;
string token ;
istringstream cutStream( value ) ;
while ( getline( cutStream, token, '\t' ) ) // cut the token one by one
cut.push_back( token ) ;
tempData->number = cut[0] ;
if ( Count < 10 ) cout << "[ " << Count << "]\t" << cut[0] << "\t" ;
else if ( Count >= 10 && Count < 100 ) cout << "[ " << Count << "]\t" << cut[0] << "\t" ;
else if ( Count >= 100 ) cout << "[" << Count << "]\t" << cut[0] << "\t" ;
// print number
tempData->name = cut[1] ;
if ( strlen( cut[1].c_str() ) < 8 ) cout << cut[1] << "\t\t\t\t" ;
else if ( strlen( cut[1].c_str() ) >= 8 && strlen( cut[1].c_str() ) < 17 ) cout << cut[1] << "\t\t\t" ;
else cout << cut[1] << "\t\t" ;
// print name
tempData->type1 = cut[2] ;
if ( strlen( cut[2].c_str() ) < 8 ) cout << cut[2] << "\t\t" ;
else cout << cut[2] << "\t" ;
// print type
tempData->total = atoi( cut[4].c_str() ) ;
tempData->HP = atoi( cut[5].c_str() ) ;
cout << cut[5] << "\t" ;
// print HP
tempData->ATK = atoi( cut[6].c_str() ) ;
cout << cut[6] << "\t" ;
// print ATK
tempData->DEF = atoi( cut[7].c_str() ) ;
cout << cut[7] << endl ;
Count++ ;
// cout<< tempData->HP<< endl ;
Insert( tempData ) ;
tempData = NULL ;
tempData = new DataStruct ;
} // get the whole file
DataStruct* depth = root ;
cout << "HP Tree Height: " << GetHeight( depth ) << endl << endl ;
} // Analyze the whole input file
void SortAndPrintVisit() {
for ( int one = 0 ; one < qualified.size() ; one++ ) {
for ( int two = one+1 ; two < qualified.size() ; two++ ) {
if ( qualified[one]->HP < qualified[two]->HP ) swap( qualified[one], qualified[two] ) ;
else if ( qualified[one]->HP == qualified[two]->HP ) {
if ( atoi( qualified[one]->number.c_str() ) > atoi( qualified[two]->number.c_str() ) ) {
cout << qualified[one]->number << " " << qualified[two]->number << endl ;
swap( qualified[one], qualified[two] ) ;
}
} // same hp
} // for
} // for
cout << "\t#\tName\t\t\t\tType 1\t\tHP\tTotal\tAttack\tDefense" << endl ;
for ( int i = 0 ; i < qualified.size() ; i++ ) {
if ( i < 9 ) cout << "[ " << i+1 << "]\t" << qualified[i]->number << "\t" ;
else if ( i >= 9 && Count < 100 ) cout << "[ " << i+1 << "]\t" << qualified[i]->number << "\t" ;
else if ( i >= 100 ) cout << "[" << i+1 << "]\t" << qualified[i]->number << "\t" ;
if ( strlen( qualified[i]->name.c_str() ) < 8 ) cout << qualified[i]->name << "\t\t\t\t" ;
else if ( strlen( qualified[i]->name.c_str() ) >= 8 && strlen( qualified[i]->name.c_str() ) < 17 ) cout << qualified[i]->name << "\t\t\t" ;
else cout << qualified[i]->name << "\t\t" ;
// print name
if ( strlen( qualified[i]->type1.c_str() ) < 8 ) cout << qualified[i]->type1 << "\t\t" ;
else cout << qualified[i]->type1 << "\t" ;
// print type
cout << qualified[i]->total << "\t" ;
// print total
cout << qualified[i]->HP << "\t" ;
// print HP
cout << qualified[i]->ATK << "\t" ;
// print ATK
cout << qualified[i]->DEF << endl ;
} // print out the datas
cout << "Number of visited nodes = " << visit << endl << endl ;
visit = 0 ;
qualified.clear() ;
} // Sort the qualified vector
void Filter( int data, DataStruct* johnnyWalker ) {
DataStruct* sameWalker = NULL ;
if ( johnnyWalker == NULL ) return ;
if ( johnnyWalker->HP >= data ) {
if ( johnnyWalker->same == NULL ) qualified.push_back( johnnyWalker ) ;
else {
sameWalker = johnnyWalker ;
while ( sameWalker != NULL ) {
qualified.push_back( sameWalker ) ;
sameWalker = sameWalker->same ;
} // same
} // else
} // found
if ( johnnyWalker->HP >= data ) Filter( data, johnnyWalker->leftChild ) ;
Filter( data, johnnyWalker->rightChild ) ;
if ( johnnyWalker->visit == false ) {
visit++ ;
johnnyWalker->visit = true ;
} // check if visit or not
} // Filter and save to new vector
void Delete() {
DataStruct* Largest = GetRightMost() ;
DataStruct* temp = NULL ;
if ( root == NULL ) cout << "BST empty!" << endl ;
else {
if ( Largest->same ) {
if( root == Largest ) {
cout << Largest->whole << endl ;
//cout<< "6"<< endl ;
temp = Largest->leftChild;
root = Largest->same;
Largest->same->parent = root;
Largest->same->leftChild = temp;
temp->parent = root;
} // if
else{
cout << Largest->whole << endl ;
//cout << "1" << endl ;
temp = Largest->same ;
temp->leftChild = Largest->leftChild ;
Largest = Largest->parent ;
delete Largest->rightChild;
Largest->rightChild = temp ;
temp->parent = Largest;
} //else
} // if
else if( root == Largest ) {
//cout<< "5"<<endl;
cout << Largest->whole << endl ;
root = Largest->leftChild;
if( Largest->leftChild ) Largest->leftChild->parent = root;
} // if
else if ( Largest->leftChild && !Largest->same ) {
cout << Largest->whole << endl ;
//cout << "3" << endl ;
temp = Largest->leftChild ;
Largest = Largest->parent ;
delete Largest->rightChild;
Largest->rightChild = temp ;
temp->parent = Largest;
} // if
else{
cout << Largest->whole << endl ;
//cout << "4" << endl ;
temp = Largest;
Largest = Largest->parent ;
Largest->rightChild = NULL;
temp->parent = NULL;
} // else
} // else
} // Delete and minus tree high
DataStruct* GetRightMost() {
DataStruct* walk = root ;
if ( walk->rightChild == NULL ) return walk ;
while ( walk->rightChild != NULL )
walk = walk->rightChild ;
return walk ;
} // Get the biggest data
} ;
int main() {
int command = 0 ;
bool continueOrNot = false ;
CollegeHighGoGoGo dataBase ;
do {
cout << "********************************************************" << endl ; // welcome message
cout << "***** Pokemon BST *****" << endl ;
cout << "***** 0 : Quit *****" << endl ;
cout << "***** 1 : Input by HP, and count the tree height *****" << endl ;
cout << "***** 2 : Filter and show *****" << endl ;
cout << "***** 3 : Delete the largest node *****" << endl ;
cout << "********************************************************" << endl ;
cout << endl << "Please enter your choice:" << endl ;
cin >> command ; // read in user command
cout << endl ;
if ( command == 0 ) { // bye :(((
cout << "Bye :(((" << endl ;
return 0 ;
} // quit
else if ( command > 3 || command < 0 ) {
cout << "Error command! please enter an acceptable command:" << endl << endl ;
continueOrNot = true ;
} // wrong command
else if ( command == 1 ) { // read, count and copy
bool function1Confirm = false ;
do {
cout << "Please enter the file you want to analyze or [0] to quit:" << endl ;
cin >> FileN ;
if ( FileN == "0" ) {
function1Confirm = true ;
continueOrNot = true ;
} // quit
else {
string fileName = "input" + FileN + ".txt" ;
input.open( fileName.c_str() ) ;
if ( input.is_open() ) {
dataBase.Analyze() ;
function1Confirm = true ;
continueOrNot = true ;
} // open successfully
else cout << "***** " << fileName << " does not exist! *****" << endl ;
} // open file and input data to BST
} while( ! function1Confirm ) ;
Count = 0 ;
FileN = "0" ;
input.close() ;
output.close() ;
} // mission 1
else if ( command == 2 ) {
bool function2Confirm = false ;
do {
if ( dataBase.GetRoot() == NULL ) {
cout << "***** Execute Mission 1 first ! *****" << endl << endl ;
function2Confirm = true ;
continueOrNot = true ;
} // first time
else {
cout << "Type in a Threshold ( a positive integer ) or [0] to quit:" << endl ;
cin >> FileN ;
if ( atoi( FileN.c_str() ) < 1 || atoi( FileN.c_str() ) > dataBase.GetRightMost()->HP ) {
cout << "***** " << FileN << " is not in the range of [1," << dataBase.GetRightMost()->HP << "] *****" << endl ;
cout << "Try again!" << endl ;
cin >> FileN ;
function2Confirm = true ;
} // out of range
else {
DataStruct* johnnyWalker = dataBase.GetRoot() ;
dataBase.Filter( atoi( FileN.c_str() ), johnnyWalker ) ;
dataBase.SortAndPrintVisit() ;
function2Confirm = true ;
} // else
} // function run
} while ( ! function2Confirm ) ;
Count = 0 ;
FileN = "0" ;
input.close() ;
output.close() ;
} // mission 2
else if ( command == 3 ) {
bool function3Confirm = false ;
do {
if ( dataBase.GetRoot() == NULL ) {
cout << "***** Execute Mission 1 first ! *****" << endl << endl ;
function3Confirm = true ;
continueOrNot = true ;
} // first time
else {
DataStruct* johnnyWalker = dataBase.GetRoot() ;
cout << "# Name Type 1 Type 2 Total HP Attack Defense Sp. Atk Sp. Def Speed Generation Legendary" << endl ;
dataBase.Delete() ;
johnnyWalker = dataBase.GetRoot() ;
cout << "HP tree height = " << dataBase.GetHeight( johnnyWalker ) << endl ;
function3Confirm = true ;
} // delete
} while ( ! function3Confirm ) ;
} // mission 3
} while( continueOrNot ) ;
} // Main function
| 1f5c65c704b6eec2895d92a837ae576cf287e778 | [
"C++"
] | 1 | C++ | popshia/PokemonBST | 48dbb53659828e68850e9fba15fd44130442f7be | 57876b310d455c985020326ea760b5a280a601ec | |
refs/heads/master | <repo_name>jloriag/java-db<file_sep>/src/main/java/sql.sql
-- phpMyAdmin SQL Dump
-- version 4.7.0
-- https://www.phpmyadmin.net/
--
-- Servidor: localhost
-- Tiempo de generación: 20-10-2017 a las 12:45:57
-- Versión del servidor: 10.1.25-MariaDB
-- Versión de PHP: 5.6.31
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET AUTOCOMMIT = 0;
START TRANSACTION;
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Base de datos: `prueba`
--
-- --------------------------------------------------------
--
-- Estructura de tabla para la tabla `curso`
--
CREATE TABLE `curso` (
`id` int(10) NOT NULL,
`descripcion` varchar(200) NOT NULL,
`estado` bit(1) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Volcado de datos para la tabla `curso`
--
INSERT INTO `curso` (`id`, `descripcion`, `estado`) VALUES
(1, 'JAVA Basico', b'1'),
(2, 'Nuevo', b'1'),
(3, 'NuevoAAA', b'1'),
(4, 'ABC123', b'1');
--
-- Índices para tablas volcadas
--
--
-- Indices de la tabla `curso`
--
ALTER TABLE `curso`
ADD PRIMARY KEY (`id`);
--
-- AUTO_INCREMENT de las tablas volcadas
--
--
-- AUTO_INCREMENT de la tabla `curso`
--
ALTER TABLE `curso`
MODIFY `id` int(10) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=5;COMMIT;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */; | 6d09bbb072f654cd9e2410a2dfd8912485934620 | [
"SQL"
] | 1 | SQL | jloriag/java-db | b644898c825843b154e9d5d6975ccad3fb0cb0bb | 853d28a86c059222327a8d66c796c13646f6a2cc | |
refs/heads/main | <repo_name>yms60006/sixfours<file_sep>/README.md
# sixfours
* hello
* world<file_sep>/inwoox-trekker_youtubecrawler-c7e8169ccf93/YoutubeCrawler/YoutubeCrawler/Form1.cs
using OpenQA.Selenium;
using OpenQA.Selenium.Chrome;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Windows.Forms;
using CefSharp;
using CefSharp.WinForms;
namespace YoutubeCrawler
{
public partial class Form1 : Form
{
ChromeDriverService service;
ChromeOptions options;
IWebDriver driver;
public Form1()
{
InitializeComponent();
service = ChromeDriverService.CreateDefaultService();
service.HideCommandPromptWindow = true;
options = new ChromeOptions();
options.AddArguments("headless");
driver = new ChromeDriver(service, options);
}
ArrayList array = new ArrayList();
private void Recommand_Btn_Click(object sender, EventArgs e)
{
array.Clear();
listBox1.Items.Clear();
PictureBox[] pictureBoxes = { pictureBox1, pictureBox2, pictureBox3, pictureBox4, pictureBox5, pictureBox6};
Button[] buttons = { button1,button2,button3,button4,button5,button6 };
if (Cef.IsInitialized.Equals(false)) Cef.Initialize(new CefSettings());
driver.Url = "https://www.youtube.com/";
int k = 0;
List<IWebElement> elements1 = driver.FindElements(By.TagName("ytd-rich-item-renderer")).ToList();
foreach(IWebElement x in elements1)
{
if (x.Text == "") continue;
listBox1.Items.Add(x.Text);
array.Add(x.FindElement(By.Id("thumbnail")).GetAttribute("href"));
try
{
if (k < 6)
{
buttons[k].Text = x.Text;
pictureBoxes[k].SizeMode = PictureBoxSizeMode.StretchImage;
pictureBoxes[k].ImageLocation = x.FindElement(By.Id("img")).GetAttribute("src").Split('?')[0];
k++;
}
}
catch { }
}
}
private void YoutubeSearch_Btn_Click(object sender, EventArgs e)
{
Process[] process = Process.GetProcessesByName("CefSharp.BrowserSubprocess");
foreach (Process x in process)
{
x.Kill();
}
panel1.Controls.Remove(browser);
if (Cef.IsInitialized.Equals(false)) Cef.Initialize(new CefSettings());
browser = new ChromiumWebBrowser(@"https://www.youtube.com/results?search_query=" + textBox1.Text);
browser.Dock = DockStyle.Fill;
panel1.Controls.Add(browser);
}
private void ProcessKill_Btn_Click(object sender, EventArgs e)
{
try
{
Process[] process = Process.GetProcessesByName("CefSharp.BrowserSubprocess");
foreach(Process x in process) x.Kill();
panel1.Controls.Remove(browser);
}
catch { }
}
public ChromiumWebBrowser browser;
private void button1_Click(object sender, EventArgs e)
{
Process[] process = Process.GetProcessesByName("CefSharp.BrowserSubprocess");
foreach (Process x in process)
{
x.Kill();
}
panel1.Controls.Remove(browser);
if (Cef.IsInitialized.Equals(false)) Cef.Initialize(new CefSettings());
browser = new ChromiumWebBrowser(array[0].ToString());
browser.Dock = DockStyle.Fill;
panel1.Controls.Add(browser);
}
private void button2_Click(object sender, EventArgs e)
{
Process[] process = Process.GetProcessesByName("CefSharp.BrowserSubprocess");
foreach (Process x in process)
{
x.Kill();
}
panel1.Controls.Remove(browser);
if (Cef.IsInitialized.Equals(false)) Cef.Initialize(new CefSettings());
IWebDriver driver = new ChromeDriver(service, options);
browser = new ChromiumWebBrowser(array[1].ToString());
browser.Dock = DockStyle.Fill;
panel1.Controls.Add(browser);
}
private void button3_Click(object sender, EventArgs e)
{
Process[] process = Process.GetProcessesByName("CefSharp.BrowserSubprocess");
foreach (Process x in process)
{
x.Kill();
}
panel1.Controls.Remove(browser);
if (Cef.IsInitialized.Equals(false)) Cef.Initialize(new CefSettings());
IWebDriver driver = new ChromeDriver(service, options);
browser = new ChromiumWebBrowser(array[2].ToString());
browser.Dock = DockStyle.Fill;
panel1.Controls.Add(browser);
}
private void button4_Click(object sender, EventArgs e)
{
Process[] process = Process.GetProcessesByName("CefSharp.BrowserSubprocess");
foreach (Process x in process)
{
x.Kill();
}
panel1.Controls.Remove(browser);
if (Cef.IsInitialized.Equals(false)) Cef.Initialize(new CefSettings());
IWebDriver driver = new ChromeDriver(service, options);
browser = new ChromiumWebBrowser(array[3].ToString());
browser.Dock = DockStyle.Fill;
panel1.Controls.Add(browser);
}
private void button5_Click(object sender, EventArgs e)
{
Process[] process = Process.GetProcessesByName("CefSharp.BrowserSubprocess");
foreach (Process x in process)
{
x.Kill();
}
panel1.Controls.Remove(browser);
if (Cef.IsInitialized.Equals(false)) Cef.Initialize(new CefSettings());
IWebDriver driver = new ChromeDriver(service, options);
browser = new ChromiumWebBrowser(array[4].ToString());
browser.Dock = DockStyle.Fill;
panel1.Controls.Add(browser);
}
private void button6_Click(object sender, EventArgs e)
{
Process[] process = Process.GetProcessesByName("CefSharp.BrowserSubprocess");
foreach (Process x in process)
{
x.Kill();
}
panel1.Controls.Remove(browser);
if (Cef.IsInitialized.Equals(false)) Cef.Initialize(new CefSettings());
IWebDriver driver = new ChromeDriver(service, options);
browser = new ChromiumWebBrowser(array[5].ToString());
browser.Dock = DockStyle.Fill;
panel1.Controls.Add(browser);
}
private void listBox1_SelectedIndexChanged(object sender, EventArgs e)
{
Process[] process = Process.GetProcessesByName("CefSharp.BrowserSubprocess");
foreach (Process x in process)
{
x.Kill();
}
panel1.Controls.Remove(browser);
if (Cef.IsInitialized.Equals(false)) Cef.Initialize(new CefSettings());
browser = new ChromiumWebBrowser(array[listBox1.SelectedIndex].ToString());
browser.Dock = DockStyle.Fill;
panel1.Controls.Add(browser);
}
private void Form1_FormClosing(object sender, FormClosingEventArgs e)
{
Process[] process = Process.GetProcessesByName("chrome");
foreach (Process x in process) x.Kill();
process = Process.GetProcessesByName("chromedriver");
foreach (Process x in process) x.Kill();
}
}
}
| 4c2548c92186e7962107ea05c4c60dcd7cb091bc | [
"Markdown",
"C#"
] | 2 | Markdown | yms60006/sixfours | 949a582b39c3bc910582ac95f5acbc4d8d15408b | 1ad84f5f561f3bc981a76b43d4674453b6424b7b | |
refs/heads/main | <repo_name>JaiberS/holbertonschool-interview<file_sep>/0x01-insert_in_sorted_linked_list/0-insert_number.c
#include "lists.h"
/**
* insert_node - adds a new node at a listint_t list.
* @n: new data
* @head: first node
*
* Return: the address of the new element, or NULL if it failed
*/
listint_t *insert_node(listint_t **head, int n)
{
listint_t *new, *head2, *ant;
unsigned int i = 0;
if (head == NULL) {
return (0);
}
new = malloc(sizeof(listint_t));
if (new == NULL)
{
free(new);
return (NULL);
}
if (*head == NULL)
{
new->n = n;
new->next = NULL;
*head = new;
return (new);
}
head2 = *head;
ant = NULL;
for (;head2 != NULL && head2->n < n; ant = head2, head2 = head2->next, i++)
;
new->n = n;
if (ant != NULL)
{
ant->next = new;
new->next = head2;
}
else
{
new->next = *head;
*head = new;
}
return (new);
}<file_sep>/0x02-heap_insert/1-heap_insert.c
#include "binary_trees.h"
/**
* heap_insert - Write a function that inserts a value into a Max Binary Heap
* @root: is a double pointer to the root node of the Heap
* @value: is the value store in the node to be inserted
*
* Return: pointer to the inserted node, or NULL on failure
*/
heap_t *heap_insert(heap_t **root, int value)
{
heap_t *new, *head;
int max_level = 0;
if (*root == NULL) {
new = malloc(sizeof(heap_t));
if (new == NULL) {
return (NULL);
}
new->right = NULL;
new->left = NULL;
new->n = value;
new->parent = NULL;
*root = new;
return (*root);
}
head = *root;
while (head->right != NULL) {
head = head->right;
max_level++;
}
head = *root;
new = heap_insert_with_levels(root, value, 0, max_level + 1);
return (new);
}
/**
* heap_insert_with_levels - Write a function that inserts a value into a Max Binary Heap
* @root: is a double pointer to the root node of the Heap
* @value: is the value store in the node to be inserted
*
* Return: pointer to the inserted node, or NULL on failure
*/
heap_t *heap_insert_with_levels(heap_t **root, int value, int level, int max_level)
{
heap_t *new, *head;
head = *root;
if (level == max_level) {
while(head->parent != NULL) {
head = head->parent;
}
new = heap_insert_with_levels(&head->right, value, level + 1, max_level);
return (new);
}
if (head->left == NULL) {
new = malloc(sizeof(heap_t));
if (new == NULL) {
return (NULL);
}
new->right = NULL;
new->left = NULL;
new->n = value;
head->left = new;
new->parent = head;
while (head != NULL && new->n > head->n) {
value = head->n;
head->n = new->n;
new->n = value;
new = head;
head = head->parent;
}
} else if (head->right == NULL) {
new = malloc(sizeof(heap_t));
if (new == NULL) {
return (NULL);
}
new->right = NULL;
new->left = NULL;
new->n = value;
head->right = new;
new->parent = head;
while (head != NULL && new->n > head->n) {
value = head->n;
head->n = new->n;
new->n = value;
new = head;
head = head->parent;
}
} else {
new = heap_insert_with_levels(&head->left, value, level + 1, max_level);
}
return (new);
}
<file_sep>/0x02-heap_insert/README.md
# holbertonschool-interview
Data structures
Please use the following data structures and types for binary trees. Don’t forget to include them in your header file.<file_sep>/0x00-lockboxes/0-lockboxes.py
#!/usr/bin/python3
def canUnlockAll(boxes):
if not isinstance(boxes, list) or len(boxes) == 0:
return False
for box in boxes:
if not isinstance(box, list):
return False
if len(boxes) == 1:
return True
keys = [0]
for j in keys:
for i in boxes[j]:
if 0 < i < len(boxes) and i not in keys:
keys.append(i)
if len(keys) == len(boxes):
return True
return False
| 19317a2fc1c94551c28c9f33b58324646f544cbf | [
"Markdown",
"C",
"Python"
] | 4 | C | JaiberS/holbertonschool-interview | faceabd6126fe7f3b617f48b4c5c8b15c29a8710 | 31f82ec17cd28590670c046ee0d2e14f1e53cda3 | |
refs/heads/master | <file_sep>#!/usr/bin/env python3
import requests
from bs4 import BeautifulSoup
import lxml
import os.path
import time
import re
from operator import itemgetter
import json
# main url
url = 'https://avito.ru/'
# request headers (browser User-Agent) sent with every request
user_agent = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36'}
# settings to complete http query
settings = {
'city': 'moskva',
'query': 'бультерьер'
}
# construct the query string with all settings
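# e.g. construct_query(url, settings, 1) -> 'https://avito.ru/moskva?p=1&q=бультерьер'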
def construct_query(url, settings, page):
full_query = '{0}{1}?p={2}&q={3}'.format(url, settings['city'], page, settings['query'])
return full_query
# making an http request
def get_html(full_query):
    r = requests.get(full_query, headers=user_agent)
html = r.text
return html
# get the soup of the first page (count_pages() below tells us how long the pagination is)
def get_first_page_soup():
full_query = construct_query(url, settings, page=1)
html = get_html(full_query)
soup = BeautifulSoup(html, 'lxml')
return soup
# getting any page soup
def get_any_page_soup(page):
full_query = construct_query(url, settings, page)
html = get_html(full_query)
soup = BeautifulSoup(html, 'lxml')
return soup
# count how long pagination is
def count_pages():
full_query = construct_query(url, settings, page=1)
html = get_html(full_query)
soup = BeautifulSoup(html, 'lxml')
pagination_last_child_href = soup.find_all('a', class_='pagination-page')[-1]['href']
p = re.findall(r'p=\d+', pagination_last_child_href)[0]
pages = re.findall(r'\d+', p)[0]
return pages
# parsing needed description of items
def get_item_description(soup, id=1):
# all description can be found in one div
item_cards = soup.find_all('div', class_='description item_table-description')
# photo is in another element
photos_divs = soup.find_all('div', class_='item-photo')
all_photos = []
n = 0
for element in photos_divs:
photo = element.find('img')['src'].replace('//', '')
all_photos.append(('photo', photo))
# make empty array to collect item descriptions
all_bullys = []
# loop through items to collect data
for card, photo in zip(item_cards, all_photos):
item = {}
header = card.find_all('span', itemprop="name")
price = card.find_all('span', class_="price")
place = card.find_all('div', class_="data")
date = card.find_all('div', class_="js-item-date c-2")
link = card.find_all('a', class_="item-description-title-link")
item["id"] = id
        # the ('photo', url) tuple has to be added via update(), otherwise it will not work
item.update((photo, ))
item["header"] = header[0].get_text()
header_filter = ['стафф', 'вязка', 'джек']
if any(filter in item['header'].lower() for filter in header_filter):
continue
item["price"] = price[0].get_text().replace('₽', '').replace(' ', '').strip()
try:
item["place"] = place[0].select("p:nth-of-type(2)")[0].get_text().replace(u'\xa0', u' ')
except:
item["place"] = "Местонахождение не указано"
# filter on date. some custom logic =)
item["date"] = date[0].get_text().strip()
h = ['час', 'часа', 'часов']
d = ['день', 'дней', 'дня']
w = ['неделя', 'недели', 'недель', 'неделю']
if any(x in item['date'] for x in h):
item['date'] = 0
elif any(x in item['date'] for x in d):
multiplier = 1
            days = int(re.findall(r'\d+', item['date'])[0]) * multiplier  # use the full number, not just its first digit
item['date'] = days
elif any(x in item['date'] for x in w):
multiplier = 7
            days = int(re.findall(r'\d+', item['date'])[0]) * multiplier  # use the full number, not just its first digit
item['date'] = days
else:
item['date'] = 30
item["link"] = 'https://avito.ru' + link[0]['href']
# filter items by price
statements = ['Ценанеуказана', 'Бесплатно', 'Договорная']
if item['price'] not in statements:
if (10000 <= int(item['price']) <= 25000):
all_bullys.append(item)
id += 1
    # return the running id so the next page continues numbering where this one stopped
return all_bullys, id
def read_pages():
# counting pages with count_pages()
pages = int(count_pages()) + 1
# generate counter
page_counter = [i for i in range(pages) if i > 0]
id = 1
# list of all cards
all_cards = []
for page in page_counter:
# simply getting soup on every page
soup = get_any_page_soup(page)
        # get the item list plus the next id so numbering continues across pages
        item_cards, new_id = get_item_description(soup, id)
        id = new_id
all_cards.append(item_cards)
time.sleep(3)
# getting flat list
all_cards_flat = []
for page in all_cards:
for card in page:
all_cards_flat.append(card)
return all_cards_flat
# sort adverts by date
def sort_by_date(all_cards_flat):
# create sorted array of dictionaries
sorted_cards = sorted(all_cards_flat, key=lambda k: int(k['date']))
return sorted_cards
# slice top ten adverts
def get_top_ten(sorted_cards):
top_ten = sorted_cards[0:10]
return top_ten
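# typical usage (this is what bot.py does):
#   top = get_top_ten(sort_by_date(read_pages()))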
#def main():
#main()
<file_sep>#!/usr/bin/env python3
import json
import requests
import avito_parser as ap
import time
# this function reads bot settings like api token and chat id from json file
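# expected bot_settings.json shape (example values):
#   {"token": "<bot token>", "chat_id": "<chat id>"}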
def read_token(filename):
with open(filename, 'r') as f:
bot_settings = json.load(f)
token = bot_settings['token']
chat_id = bot_settings['chat_id']
return token, chat_id
# save settings to variables
token, chat_id = read_token('bot_settings.json')
# construct url to make request to tg api
url = 'https://api.telegram.org/bot{0}/'.format(token)
# make http request with url + method
def make_request(method):
r = requests.get(url + method)
return r.text
# this can check updates
def get_updates():
updates = make_request('getUpdates')
return updates
# send photo api method
def send_photo(photo):
query_string = 'sendPhoto?chat_id={0}&photo={1}&parse_mode=HTML&disable_notification=true'.format(chat_id, photo)
make_request(query_string)
# send message api method
def send_message(formatted_text):
query_string = 'sendMessage?chat_id={0}&text={1}&parse_mode=HTML&disable_notification=true'.format(chat_id, str(formatted_text))
make_request(query_string)
# format and send message and photo
def format_and_send():
# call parser to grab data
cards = ap.get_top_ten(ap.sort_by_date(ap.read_pages()))
for card in cards:
# construct and format text
text = '*****', '<b>' + card['header'] + '</b>', 'Цена: ' + str(card['price']), 'Локация: ' + card['place'], str(card['date']) + ' дня назад', '<a href="' + card['link'] + '">Ссылка</a>' + '\n'
formatted_text = '\n'.join(text)
        # variable to save photo src url
photo = card['photo']
# send all together
send_message(formatted_text)
time.sleep(1)
send_photo(photo)
time.sleep(2)
def main():
format_and_send()
main()
| 60820600e579bb2951b66e0d2b7b38267bdd08b5 | [
"Python"
] | 2 | Python | m3xan1k/avito_parser | 6065004c80cbd99cd9593e20c85252fd476db37b | fd798c81b5f42c0ccaf4db8792ccd8c734efe1f8 | |
refs/heads/main | <file_sep># orders
Generate the Order Number and fetch the order numbers
| 79a3c2fd51707e20816d661b77ad03e8d984e0ee | [
"Markdown"
] | 1 | Markdown | sunilkalyan08/orders | c35f03b465b4071b65586124b459d4ddec9613ff | 3dde41682d51db74e314b356d42732c8fd242fc1 | |
refs/heads/master | <file_sep>using Microsoft.EntityFrameworkCore;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.ComponentModel;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
namespace Training.Models
{
public class Product
{
public int ProductId { get; set; }
public int ProductCd { get; set; }
public string ProductName { get; set; }
[Column(TypeName = "decimal(7, 2)")]
public decimal? Price { get; set; }
[EnumDataType(typeof(UnitClass))]
public UnitClass Unit { get; set; }
[EnumDataType(typeof(TaxClass))]
public TaxClass Tax { get; set; }
}
public enum UnitClass
{
Pieces = 0,
Kilogram = 1
}
public enum TaxClass
{
TaxIn = 0,
TaxOut = 1
}
}
<file_sep>#!/bin/bash
echo 'export PATH="$PATH:/opt/mssql-tools/bin"' >> ~/.bash_profile
echo 'export PATH="$PATH:/opt/mssql-tools/bin"' >> ~/.bashrc
<file_sep># -*- mode: ruby -*-
# vi: set ft=ruby ts=2 sts=2 sw=2 et :
Vagrant.configure(2) do |config|
config.vm.box = "centos/7"
config.vm.network "private_network", ip: "192.168.55.5"
config.vm.synced_folder "./src", "/home/vagrant/src"
config.vm.provider "virtualbox" do |vb|
vb.memory = 4096
end
config.vm.provision "shell", inline: <<-SHELL
# asp.net core
rpm -Uvh https://packages.microsoft.com/config/rhel/7/packages-microsoft-prod.rpm
yum -y install dotnet-sdk-2.1
# sqlserver
curl -o /etc/yum.repos.d/mssql-server.repo https://packages.microsoft.com/config/rhel/7/mssql-server-2017.repo
yum install -y mssql-server
curl -o /etc/yum.repos.d/msprod.repo https://packages.microsoft.com/config/rhel/7/prod.repo
yum install -y unixODBC-devel
SHELL
end
<file_sep># dotnet-core-mvc
README, and instead for LOG
### init
at first, you must prepare files.
and go to directory
```sh
$ git clone https://github.com/rozeroze/dotnet-core-mvc.git
$ cd dotnet-core-mvc
```
execute `vagrant up`.
if vagrant-box 'centos/7' is not founded, retry it after `vagrant box add`
```sh
$ vagrant box add centos/7
$ vagrant up
```
if mount error occurred
```sh
$ vagrant plugin install vagrant-vbguest
$ vagrant reload
```
nevertheless mount error occurred,
`vagrant ssh` (into vagrant) and do it(following)
```sh
$ sudo yum -y update kernel
$ sudo yum -y install kernel-devel kernel-headers dkms gcc gcc-c++
```
`logout` (out of vagrant) and `vagrant reload`
### start
if vagrant is not created, or aborted
```sh
$ vagrant up
```
into vagrant
```sh
$ vagrant ssh
```
##### install dotnet packages and tools
```sh
$ cd ~/src
$ sh dotnet-package.sh
```
##### sqlserver install, and it's tool
setup sqlserver
```sh
$ cd ~/src
$ sudo /opt/mssql/bin/mssql-conf setup
```
> sqlserver-setup
> * sqlserver-edition: `2.Developer`
> * administrator-password: `<PASSWORD>`
you can choice other edition and other password.
at that time, you have a thing whose must do
* other-edition choice
* license authorization
* other-password registered
* you must change `Training/appsettings.json`
* L4: __ConnectionStrings.TrainingContext.Password__ to your-password
install mssql-tools
```sh
$ sudo yum install -y mssql-tools
$ sh mssql-tool.sh
$ source ~/.bashrc
```
> accept license: YES
##### database migration
if sqlserver is inactive, activate it
```sh
$ # check status
$ systemctl status mssql-server
$ # if it is inactive
$ sudo systemctl start mssql-server
```
create database in sqlserver
```sh
$ sqlcmd -U SA -p -i create-database.sql
Password: <PASSWORD>
```
database migration
```sh
$ cd ~/src/Training
$ dotnet ef migrations add InitialCreate
$ dotnet ef database update
```
##### run the project
```sh
$ cd ~/src/Training
$ dotnet run
```
open your browser -> http://192.168.55.5:8000
### history (instead for log)
##### Initial
```sh
$ cd ~/src
$ dotnet new mvc -n Training
$ cd Training
$ dotnet add package Microsoft.EntityFrameworkCore.Sqlite
$ vi Models/Model.cs
$ dotnet ef migrations add InitialCreate
$ dotnet ef database update
$ dotnet tool install -g dotnet-aspnet-codegenerator
$ dotnet add package Microsoft.VisualStudio.Web.CodeGeneration.Design
$ dotnet restore
$ dotnet aspnet-codegenerator controller -name BlogsController -m Blog -dc BloggingContext --relativeFolderPath Controllers --useDefaultLayout --referenceScriptLibraries
```
> https://docs.microsoft.com/ja-jp/ef/core/get-started/aspnetcore/new-db?tabs=netcore-cli
##### Add Identity
```sh
$ dotnet new mvc --auth Individual -n Training
$ dotnet add package Microsoft.VisualStudio.Web.CodeGeneration.Design --version 2.1.6
$ dotnet restore
```
> https://docs.microsoft.com/ja-jp/aspnet/core/security/authentication/identity?view=aspnetcore-2.2&tabs=netcore-cli
##### Remove 'yum update' & 'dotnet packages'
```sh
$ vagrant box update
```
'centos/7' updated to '1811.02' (2018-12-21)
```sh
$ vagrant up
$ vagrant ssh
$ cd ~/src
$ sh init.sh
```
##### Install SQL-Server
```sh
$ sudo curl -o /etc/yum.repos.d/mssql-server.repo https://packages.microsoft.com/config/rhel/7/mssql-server-2017.repo
$ sudo yum install -y mssql-server
$ sudo /opt/mssql/bin/mssql-conf setup
```
- 2) Developer
ERROR: the installing sqlserver is neseccary least 2000 megabytes.
edit Vagrantfile 'vb.memory = 2048' -> '4096'
```sh
$ vagrant halt
$ vagrant up
```
and after vagrant halt & up, retry it
```sh
$ sudo curl -o /etc/yum.repos.d/msprod.repo https://packages.microsoft.com/config/rhel/7/prod.repo
$ sudo yum install -y mssql-tools unixODBC-devel
$ echo 'export PATH="$PATH:/opt/mssql-tools/bin"' >> ~/.bash_profile
$ echo 'export PATH="$PATH:/opt/mssql-tools/bin"' >> ~/.bashrc
$ source ~/.bashrc
```
<file_sep>#!/bin/bash
cd ~/src/Training
#dotnet add package Microsoft.EntityFrameworkCore.Sqlite --version 2.1.0
#dotnet remove package Microsoft.EntityFrameworkCore.Sqlite
dotnet add package Microsoft.EntityFrameworkCore.SqlServer --version 2.1.1
dotnet tool install -g dotnet-aspnet-codegenerator --version 2.1.6
dotnet add package Microsoft.VisualStudio.Web.CodeGeneration.Design --version 2.1.6
dotnet restore
<file_sep>create database Training collate Japanese_CS_AI
| b0c6efd18f64b7400b4de7a1476eb5d5ca636f0d | [
"SQL",
"Ruby",
"Markdown",
"C#",
"Shell"
] | 6 | C# | rozeroze/dotnet-core-mvc | 64acbc5176678cd6af54e7061af6d4807f478fa1 | 4118808b3c9f950a7e8771186559533237eabcc3 | |
refs/heads/master | <repo_name>MoLu10086/aaa<file_sep>/aaa/src/main/java/A.java
/**
* @Author:MoLu.
* @Date:2019/6/8
* @Time:20:32
**/
public class A {
public static void main(String[] args) {
System.out.println("hello,world...");
System.out.println("hello,world....");
System.out.println("hello,world.....");
System.out.println("hello,world......");
System.out.println("hello,world.......");
System.out.println("hello,world........");
System.out.println("hello,world.........");
}
}
| b66156d1c4a896a695f51eb33d7f84e53bc8e205 | [
"Java"
] | 1 | Java | MoLu10086/aaa | 1c7e4705f2b494dba19d9b1629a074d7ffb85142 | 6e961435a73566b1a2fe2f5fd351d84beb3c1b8f | |
refs/heads/master | <repo_name>MartinAronsen/storm<file_sep>/hooks/Db.php
//<?php
/* To prevent PHP errors (extending class does not exist) revealing path */
if ( !defined( '\IPS\SUITE_UNIQUE_KEY' ) ) {
exit;
}
class storm_hook_Db extends _HOOK_CLASS_
{
protected $start = null;
protected $final = null;
protected $currentQ = 1;
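	/* Wraps \IPS\Db::query() so every query can be timed and handed to the Storm profiler when profiling is enabled */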
public function query( $query, $log = true, $read = false )
{
if ( defined( 'CJ_STORM_PROFILER_DISABLE_DB' ) and CJ_STORM_PROFILER_DISABLE_DB ) {
return parent::query( $query, $log, $read );
}
$dbMem = true;
if ( defined( 'CJ_STORM_PROFILER_DISABLE_DB_MEM' ) and CJ_STORM_PROFILER_DISABLE_DB_MEM ) {
$dbMem = false;
}
if ( ( defined( 'CJ_STORM_PROFILER' ) and CJ_STORM_PROFILER ) or ( defined( 'CJ_STORM_PROFILER_SAFE_MODE' ) and CJ_STORM_PROFILER_SAFE_MODE and \IPS\storm\Profiler::profilePassCheck() ) ) {
$this->start = microtime( true );
if ( $dbMem ) {
\IPS\storm\Profiler::i()->memoryStart();
}
$return = parent::query( $query, true );
$this->final = microtime( true ) - $this->start;
$this->log( $query );
$this->sendToProfiler();
return $return;
}
return parent::query( $query, $log, $read );
}
public function log( $query, $server = null )
{
		if ( defined( 'CJ_STORM_PROFILER_DISABLE_DB' ) and CJ_STORM_PROFILER_DISABLE_DB ) {
			parent::log( $query, $server );

			return;
		}
$dbMem = true;
if ( defined( 'CJ_STORM_PROFILER_DISABLE_DB_MEM' ) and CJ_STORM_PROFILER_DISABLE_DB_MEM ) {
$dbMem = false;
}
if ( ( defined( 'CJ_STORM_PROFILER' ) and CJ_STORM_PROFILER ) or ( defined( 'CJ_STORM_PROFILER_SAFE_MODE' ) and CJ_STORM_PROFILER_SAFE_MODE and \IPS\storm\Profiler::profilePassCheck() ) ) {
if ( $dbMem ) {
\IPS\storm\Profiler::i()->memoryEnd( 'DB Query', $query );
}
$bt = var_export( debug_backtrace( DEBUG_BACKTRACE_IGNORE_ARGS ), true );
$this->log[ $this->currentQ ] = [
'query' => $query,
'backtrace' => $bt,
];
}
else {
parent::log( $query, $server );
}
}
public function sendToProfiler()
{
if ( isset( $this->log[ $this->currentQ ] ) ) {
$data = $this->log[ $this->currentQ ];
\IPS\storm\Profiler::i()->dbQuery( $data, round( $this->final, 4 ) );
$this->currentQ++;
}
}
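	/* Same as query(), but rebuilds the prepared statement with its binds first so the profiler logs the final SQL */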
public function preparedQuery( $query, array $_binds, $read = false )
{
if ( defined( 'CJ_STORM_PROFILER_DISABLE_DB' ) and CJ_STORM_PROFILER_DISABLE_DB ) {
return parent::preparedQuery( $query, $_binds );
}
$dbMem = true;
if ( defined( 'CJ_STORM_PROFILER_DISABLE_DB_MEM' ) and CJ_STORM_PROFILER_DISABLE_DB_MEM ) {
$dbMem = false;
}
if ( ( defined( 'CJ_STORM_PROFILER' ) and CJ_STORM_PROFILER ) or ( defined( 'CJ_STORM_PROFILER_SAFE_MODE' ) and CJ_STORM_PROFILER_SAFE_MODE and \IPS\storm\Profiler::profilePassCheck() ) ) {
$this->start = microtime( true );
if ( $dbMem ) {
\IPS\storm\Profiler::i()->memoryStart();
}
$bindsS = [];
$queryS = $query;
$i = 0;
for ( $j = 0; $j < \strlen( $queryS ); $j++ ) {
if ( $queryS[ $j ] == '?' ) {
if ( array_key_exists( $i, $_binds ) ) {
if ( $_binds[ $i ] instanceof \IPS\Db\Select ) {
$queryS = \substr( $queryS, 0, $j ) . $_binds[ $i ]->query . \substr( $queryS, $j + 1 );
$j += \strlen( $_binds[ $i ]->query );
foreach ( $_binds[ $i ]->binds as $_bind ) {
$bindsS[] = $_bind;
}
}
else {
$bindsS[] = $_binds[ $i ];
}
$i++;
}
}
}
$this->log( static::_replaceBinds( $queryS, $bindsS ) );
$parent = parent::preparedQuery( $query, $_binds, $read );
$this->final = microtime( true ) - $this->start;
$this->sendToProfiler();
return $parent;
}
return parent::preparedQuery( $query, $_binds, $read );
}
}
<file_sep>/dev/lang.php
<?php
$lang = [
'__app_storm' => "Storm",
'menu__storm_configuration' => "Storm",
'menu__storm_configuration_settings' => "Settings",
'menu__storm_configuration_apps' => "Dev Folder: Applications",
'menu__storm_configuration_proxyclass' => "Proxy Classes Generator",
'menu__storm_configuration_plugins' => "Dev Folder: Plugins",
'dev_class' => "Sources",
'storm_settings_headerdoc__tabbed' => "Headerdoc",
'storm_settings_headerdoc_enabled' => "Enabled",
'storm_settings_headerdoc_enabled_desc' => 'If enabled, will add a file header to each php class.',
'storm_settings_headerdoc_allowed_apps' => "Allowed Apps",
'storm_settings_headerdoc_allowed_apps_desc' => "Select Apps to execute Header doc generation on 'build' and on 'download'.",
    'storm_settings_headerdoc__sidebar' => "If enabled, will add a temporary header doc to each php class file when app is built. On download, will replace the temp header doc with a permanent one, containing @brief (class name), @author (app author), @copyright (current year with author name from app), @package (IPS Social Suite), @subpackage (App's name), @since (which version file was added in), @version (current version of app). Excluded folders: 3rdparty, vendor, dev, hooks, data (these are case sensitive).",
'storm_proxyclass_progress' => "Processing %s of %s",
'storm_proxyclass_done' => "Proxy Classes have been Generated",
'storm_proxyclass_title' => "Proxy Classes",
'storm_proxyclass_button' => "Generate Proxy Classes",
'storm_apps_apps_select' => "Select Application",
'storm_apps_app' => "Application",
'storm_apps_app_desc' => "Select which application to generate the Dev Folder for.",
'storm_apps_type' => "Type",
'storm_apps_type_desc' => "Choose everything to recreate the entire Dev Folder, or select which part of the Dev folder you wish to recreate.",
'storm_apps_type_lang' => "Language",
'storm_apps_type_js' => "Javascript",
'storm_apps_type_template' => "Templates",
'storm_apps_type_email' => "Email Templates",
'storm_apps_type_all' => "Everything",
'storm_apps_type_select' => "Select Type",
'storm_apps_return_javascript' => "Javascript Files Generated",
'storm_apps_return_templates' => "Template Files Generated",
'storm_apps_return_email' => "Email Template Files Generated",
'storm_apps_return_lang' => "Language Strings Generated",
'storm_apps_queue_title' => "Generating Dev Files",
'storm_apps_total_done' => "Processing %s of %s",
'storm_apps_completed' => "Dev Folder Generated Completed!",
'storm_plugins_done' => "The Dev Folder for %s has been generated!",
'storm_plugin_upload' => "Plugin XML",
'storm_plugin_upload_desc' => "Upload the plugin's xml file here. Will install the plugin if its not already installed (this will overwrite if a dev folder is already there).",
'storm_plugins_title' => "Plugins Dev Folder Generator",
'storm_apps_title' => "Applications Dev Folder Generator",
'storm_settings_title' => "Settings",
'storm_proxyclass_sidebar' => "Generates proxy classes for IPS/IPS Apps/3rd party apps, This is useful for IDE's, so useful features such as autocomplete and hinting can be used, since IPS's framework is uniuque.",
'storm_class_type' => "Class Type",
'storm_class_type_desc' => "Class type, Class for a regular class. Node for a \\IPS\\Node\\Model class. Content Item for a \\IPS\\Content\\Item class. Content Item Comment Class for a \\IPS\\Content\\Comment class.",
'storm_class_namespace' => "Namespace",
'storm_class_namespace_desc' => "Namespace for the class, leave blank if not a subclass. Class: \\IPS\\myapp, SubClass: \\IPS\\myapp\\ParentClass",
'storm_class_className' => "Class Name",
'storm_class_className_desc' => "do not include the underscore( _ ). This will also be the file's name.",
'storm_class_extends' => "Extends",
'storm_class_extends_desc' => "Does this class extend another class? if yes, please use the FQN here.",
'storm_class_implements' => "Implements",
'storm_class_implements_desc' => "will this class implement any interface classes? Use the FQN.",
    'storm_classes_class_no_exist' => "This class already exists in this namespace.",
'storm_classes_extended_class_no_exist' => "The extended class does not exist!",
'storm_classes_implemented_no_interface' => "One of your implemented interface classes does not exist!",
'storm_classes_type_no_selection' => "Please select a type!",
'storm_settings_general__tabbed' => "General",
'storm_settings_enable_debug' => "Enable",
'storm_settings_enable_debug_desc' => "Enable console output",
'ext__Headerdoc' => 'Customize how the headerdoc works for storm.',
'storm_settings_profiler' => "Profiler",
'storm_settings_enable_query' => "Enable DB Queries",
'storm_class_created' => "Created %s Class: %s has been created successfully!",
'storm_devfolder_created' => "%s has been successfully created.",
'storm_settings_tab_debug_templates' => "Debug Templates",
'storm_settings_tab_debug_templates_desc' => "This will write the templates to disk for easier debugging.",
'storm_settings_tab_debug_css' => "Debug CSS",
'storm_settings_tab_debug_css_desc' => "Requires debug templates to be enabled, but writes the css to disk and allows for easier debugging.",
'storm_settings_tab_debug_css_alt' => "Alt CSS Loading",
    'storm_settings_tab_debug_css_alt_desc' => "If you are getting no CSS, you could be hitting the query string limit in Apache; enable this to try to avoid it.",
'storm_settings_tab_profiler' => "Profiler",
'storm_settings_tab_debug' => "Dev & Debug",
'storm_dev_folder' => "Dev Folder",
'storm_devfolder_type' => "Type",
'storm_devfolder_type_desc' => "Select what dev folder type you want to create. (template, javascript)",
'storm_devfolder_args' => "Arguments",
    'storm_devfolder_args_desc' => "the arguments for the file.",
'storm_devfolder_loc' => "Location",
'storm_devfolder_loc_desc' => "accessed from (front, admin, global)",
'storm_devfolder_group' => "Group",
'storm_devfolder_group_desc' => "containing group for file",
'storm_devfolder_filename' => "Filename",
'storm_devfolder_filename_desc' => "name of the file.",
'storm_devfolder_loc_error' => "You need to select a location",
'storm_devfolder_group_error' => 'enter a group',
'storm_devfolder_widgetname_error' => "Widget Name required",
'storm_devfolder_filename_error' => "enter a filename",
'storm_devfolder_filename_exist' => "File with this name exists at this location.",
'storm_devfolder_widgetname' => "Widget Name",
'storm_devfolder_widgetname_desc' => "Used when loading the widget over the data api",
'storm_create_class' => "Create Class File",
'storm_create_class_desc' => "This allows you to create your node/AR/Item/Comment class with your database",
'storm_classes_no_blank' => "Can't be blank!",
'storm_class_item_node_class' => "Node/Item Class",
'storm_class_item_node_class_desc' => "Enter the node/item class for Item/Comment class you are about to create.",
    'storm_class_node_item_missing' => "Node/Item Class doesn't exist.",
'storm_class_database' => "Database",
'storm_class_prefix' => "Prefix",
'storm_profiler_is_fixed' => "Always show Profiler",
'storm_progress' => "%s out of %s",
'storm_mc_limit' => "Amount",
'storm_mc_limit_desc' => "How many dummy records to create",
    'storm_mc_passwords' => "Create Passwords",
'storm_mc_avatars' => "Create Avatars",
'storm_mc_group' => "Group",
'menu__storm_configuration_generator' => "Dummy Data Generator",
'storm_gen_type' => "Type",
'storm_gen_limit' => "Amount",
    'storm_gen_limit_desc' => "How many of the type to create. Forums will create between 1 and 12 topics and each topic will create between 1 and 12 posts; Topics will create between 1 and 12 posts.",
'storm_gen_none' => "Please select a type!",
'storm_generation_done' => "Generation Done",
'storm_generation_delete_done' => "Dummy Data Deleted",
'storm_apps_folder_exist' => "The folder %s already exist. please remove this folder if you want to replace.",
'storm_apps_please_select' => "Please make a selection",
'module__storm_bitbucket' => "Bitbucket",
'module__storm_general' => "General",
'r__Proxyclass' => "Proxyclass",
'r__proxyclass_manage' => "can generate proxyclasses",
'r__apps' => "Apps",
'r__apps_manage' => "can generate application dev folders",
'r__members_manage' => "can use generator",
'r__storm_create_members_loop' => "Generate Members",
'r__storm_create_generation_loop' => "Generate Forums",
'r__Geneerators' => "Generators",
'storm_query_select' => "Action",
'storm_query_table' => 'Table',
'storm_query_add_column' => 'Column',
'storm_query_type' => "Type",
'storm_query_length' => "Length",
'storm_query_decimals' => 'Decimals',
'storm_query_default' => "Default",
'storm_query_comment' => 'Comment',
'storm_query_allow_null' => "Allow Null",
'storm_query_sunsigned' => 'Unsigned',
'storm_query_zerofill' => "ZeroFill",
'storm_query_auto_increment' => 'Auto-Increment',
'storm_query_binary' => 'Binary',
'storm_query_unsigned' => 'Value',
'storm_query_values' => 'Values',
'storm_query_columns' => 'Table Column',
'storm_query_code' => 'Query Box',
'storm_member_creation_done' => 'Dummy data creation done',
'storm_remote_key_use' => "Remote Key",
'storm_remote_key_use_desc' => "<br>Use this key in another storm instance to sync apps.",
'storm_remote_url' => "Remote Interface",
'storm_remote_url_desc' => "<br> Use this for the remote interface in another storm instance to sync an app",
'storm_ftp_app' => "Remote App",
'general_tab' => "General",
'remote_tab' => "Sync",
'storm_ftp_path' => "FTP Path",
'storm_ftp_path_desc' => "Full path to where the synced file is located.",
'storm_ftp_key' => "Remote Key",
'storm_ftp_interface_host' => "Remote Interface",
'storm_ftp_host' => "FTP Host",
'storm_ftp_username' => "FTP Username",
    'storm_ftp_pass' => "FTP Password",
'storm_ftp_port' => "FTP Port",
'storm_ftp_timeout' => "FTP Timeout",
'storm_ftp_ssh' => "Use SSH",
'storm_cron_task' => "Cron Task",
'storm_cron_task_desc' => "<br>Add this to your cron, set to however often you want it to run.",
'menu__storm_configuration_menu' => "Menu",
'storm_menu_name' => 'Name',
'storm_menu_parent' => "Parent",
'storm_menu_type' => 'Type',
'storm_menu_internal' => 'Url',
'storm_menu_external' => 'Url',
'storm_ftp_secure' => "Secure Connection",
'storm_settings_disable_menu' => "Disable Menu",
'storm_settings_disable_menu_desc' => "Disables the header menu.",
'menu__storm_configuration_sync' => "Sync"
// 'storm_apps_select_app' => "Select App"
];
<file_sep>/modules/admin/configuration/generator.php
<?php
/**
* @brief Generator Class
* @author -storm_author-
* @copyright -storm_copyright-
* @package IPS Social Suite
* @subpackage Storm
* @since 2.1.0
* @version -storm_version-
*/
namespace IPS\storm\modules\admin\configuration;
/* To prevent PHP errors (extending class does not exist) revealing path */
if( !defined( '\IPS\SUITE_UNIQUE_KEY' ) )
{
header( ( isset( $_SERVER[ 'SERVER_PROTOCOL' ] ) ? $_SERVER[ 'SERVER_PROTOCOL' ] : 'HTTP/1.0' ) . ' 403 Forbidden' );
exit;
}
/**
* members
*/
class _generator extends \IPS\Dispatcher\Controller
{
/**
* Execute
*
* @return void
*/
public function execute()
{
\IPS\Dispatcher::i()->checkAcpPermission( 'members_manage' );
parent::execute();
}
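	/**
	 * Dummy data generator form: pick a type and amount, then hand off to the matching multi-redirect
	 *
	 * @return void
	 */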
protected function manage()
{
$groups = [];
foreach( \IPS\Member\Group::groups() as $k => $v )
{
$groups[ $k ] = $v->get_formattedName();
}
$url = $this->url->setQueryString( [ 'do' => 'delete', 'oldDo' => \IPS\Request::i()->do ] );
\IPS\Output::i()->sidebar[ 'actions' ][ 'refresh' ] = [
'icon' => 'delete',
'title' => 'Delete Dummy Data',
'link' => $url,
];
$el = [
[
'class' => "Select",
'name' => "storm_gen_type",
'options' => [
'options' => [
'none' => "Select Type",
'members' => "Members",
'forums' => "Forums",
'topics' => "Topics",
'posts' => "Posts"
],
'toggles' => [
'members' => [
'storm_mc_passwords',
'storm_mc_avatars',
'storm_mc_group'
]
]
],
'validation' => function( $data )
{
if( $data == "none" )
{
throw new \InvalidArgumentException( 'storm_gen_none' );
}
}
],
[
'class' => "Number",
'name' => "storm_mc_limit",
'default' => 1,
'options' => [
'min' => 1
]
],
[
'class' => "YesNo",
'default' => 1,
'name' => "storm_mc_passwords"
],
[
'class' => "YesNo",
'default' => 1,
'name' => "storm_mc_avatars"
],
[
'class' => "Select",
'name' => "storm_mc_group",
'default' => \IPS\Settings::i()->getFromConfGlobal( 'member_group' ),
'options' => [
'options' => $groups,
]
]
];
$form = \IPS\storm\Forms::i( $el );
if( $vals = $form->values() )
{
if( $vals[ 'storm_gen_type' ] == "members" )
{
$url = \IPS\Http\Url::internal( "app=storm&module=configuration&controller=generator&do=createMembers" )
->setQueryString( [
						'password' => $vals[ 'storm_mc_passwords' ],
'limit' => $vals[ 'storm_mc_limit' ],
'avatar' => $vals[ 'storm_mc_avatars' ],
'group' => $vals[ 'storm_mc_group' ]
] );
}
else
{
$url = \IPS\Http\Url::internal( "app=storm&module=configuration&controller=generator&do=generator" )
->setQueryString( [
'type' => $vals[ 'storm_gen_type' ],
'limit' => $vals[ 'storm_mc_limit' ]
] );
}
\IPS\Output::i()->redirect( $url );
}
\IPS\Output::i()->title = "Generate Dummy Data";
\IPS\Output::i()->output = $form;
}
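	/**
	 * Delete previously generated dummy content (tracked in the storm_generator table) in batches
	 *
	 * @return void
	 */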
protected function delete()
{
\IPS\Dispatcher::i()->checkAcpPermission( 'storm_create_delete_loop' );
\IPS\Output::i()->title = "Delete Content";
$url = \IPS\Http\Url::internal( "app=storm&module=configuration&controller=generator&do=delete" );
$url->setQueryString( [ 'oldDo' => \IPS\Request::i()->oldDo ] );
\IPS\Output::i()->output = new \IPS\Helpers\MultipleRedirect(
$url,
function( $data )
{
$offset = 0;
if( isset( $data[ 'offset' ] ) )
{
$offset = $data[ 'offset' ];
}
if( !isset( $data[ 'total' ] ) )
{
$total = \IPS\Db::i()->select(
'COUNT(*)',
'storm_generator'
)->first();
}
else
{
$total = $data[ 'total' ];
}
$limit = 10;
$select = \IPS\Db::i()->select(
'*',
'storm_generator',
[],
'generator_id ASC',
$limit,
null,
null,
\IPS\Db::SELECT_SQL_CALC_FOUND_ROWS
);
if( !$select->count() )
{
return null;
}
$content = new \IPS\Patterns\ActiveRecordIterator(
\IPS\Db::i()->select( '*', 'storm_generator', [], 'generator_id ASC', $limit ),
'IPS\storm\Generator'
);
foreach( $content as $key => $v )
{
$v->process();
$offset++;
}
$progress = ( $offset / $total ) * 100;
$language = \IPS\Member::loggedIn()->language()->addToStack( 'storm_progress', false, [
'sprintf' => [
$offset,
$total
]
] );
return [ [ 'total' => $total, 'offset' => $offset ], $language, $progress ];
},
function()
{
/* And redirect back to the overview screen */
$url = \IPS\Http\Url::internal( "app=storm&module=configuration&controller=generator" );
\IPS\Output::i()->redirect( $url, 'storm_generation_delete_done' );
}
);
}
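	/**
	 * Create fake members in batches of 10 via a multiple redirect
	 *
	 * @return void
	 */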
protected function createMembers()
{
\IPS\Dispatcher::i()->checkAcpPermission( 'storm_create_members_loop' );
\IPS\Output::i()->title = "Member Creation";
$limit = \IPS\Request::i()->limit ?: 10;
$password = \IPS\Request::i()->password ?: null;
$group = \IPS\Request::i()->group ?: null;
$avatar = \IPS\Request::i()->avatar ?: null;
$url = \IPS\Http\Url::internal( "app=storm&module=configuration&controller=generator&do=createMembers" )
->setQueryString( [
				'password' => $password,
'limit' => $limit,
'avatar' => $avatar,
'group' => $group
] );
\IPS\Output::i()->output = new \IPS\Helpers\MultipleRedirect(
$url,
function( $data )
{
$offset = 0;
$limit = \IPS\Request::i()->limit ?: 10;
				$password = \IPS\Request::i()->password ?: null;
$group = \IPS\Request::i()->group ?: null;
$avatar = \IPS\Request::i()->avatar ?: null;
if( isset( $data[ 'offset' ] ) )
{
$offset = $data[ 'offset' ];
}
if( isset( $data[ 'limit' ] ) )
{
$limit = $data[ 'limit' ];
}
if( isset( $data[ 'password' ] ) )
{
$password = $data[ 'password' ];
}
if( isset( $data[ 'group' ] ) )
{
$group = $data[ 'group' ];
}
if( isset( $data[ 'avatar' ] ) )
{
$avatar = $data[ 'avatar' ];
}
$max = 10;
if( $limit < $max )
{
$max = $limit;
}
if( $offset >= $limit )
{
return null;
}
for( $i = 0; $i < $max; $i++ )
{
$mem = new \IPS\storm\Pseudo\Member;
$mem->run( $password, $group, $avatar );
$offset++;
}
$progress = ( $offset / $limit ) * 100;
$language = \IPS\Member::loggedIn()->language()->addToStack( 'storm_progress', false, [
'sprintf' => [
$offset,
$limit
]
] );
return [
[
						'password' => $password,
'group' => $group,
'avatar' => $avatar,
'limit' => $limit,
'offset' => $offset
],
$language,
$progress
];
},
function()
{
$url = \IPS\Http\Url::internal( "app=storm&module=configuration&controller=generator" );
\IPS\Output::i()->redirect( $url, 'storm_member_creation_done' );
}
);
}
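	/**
	 * Create fake forums/topics/posts one at a time via a multiple redirect
	 *
	 * @return void
	 */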
protected function generator()
{
\IPS\Dispatcher::i()->checkAcpPermission( 'storm_create_generation_loop' );
\IPS\Output::i()->title = "Generator";
$type = \IPS\Request::i()->type ?: "forums";
$limit = \IPS\Request::i()->limit ?: 10;
$url = \IPS\Http\Url::internal( "app=storm&module=configuration&controller=generator&do=generator" )
->setQueryString( [
'type' => $type,
'limit' => $limit
] );
\IPS\Output::i()->output = new \IPS\Helpers\MultipleRedirect(
$url,
function( $data )
{
$offset = 0;
$type = \IPS\Request::i()->type ?: "Forum";
$limit = \IPS\Request::i()->limit ?: 10;
if( isset( $data[ 'offset' ] ) )
{
$offset = $data[ 'offset' ];
}
if( isset( $data[ 'limit' ] ) )
{
$limit = $data[ 'limit' ];
}
if( isset( $data[ 'type' ] ) )
{
$type = $data[ 'type' ];
}
$max = 1;
if( $limit < $max )
{
$max = $limit;
}
if( $offset >= $limit )
{
return null;
}
for( $i = 0; $i < $max; $i++ )
{
switch( $type )
{
case "forums":
\IPS\storm\Pseudo\Generator::i()->generateForum( false, true );
break;
case "topics":
\IPS\storm\Pseudo\Generator::i()->generateTopic();
break;
case "posts":
\IPS\storm\Pseudo\Generator::i()->generatePost();
break;
}
$offset++;
}
$progress = ( $offset / $limit ) * 100;
$language = \IPS\Member::loggedIn()->language()->addToStack( 'storm_progress', false, [
'sprintf' => [
$offset,
$limit
]
] );
return [ [ 'type' => $type, 'limit' => $limit, 'offset' => $offset ], $language, $progress ];
},
function()
{
$url = \IPS\Http\Url::internal( "app=storm&module=configuration&controller=generator" );
\IPS\Output::i()->redirect( $url, 'storm_member_creation_done' );
}
);
}
}
<file_sep>/Application.php
<?php
/**
* @brief Application Class
* @author -storm_author-
* @copyright -storm_copyright-
* @package IPS Social Suite
* @subpackage Storm
* @since 1.0.3
* @version -storm_version-
*/
namespace IPS\storm;
/**
* Fixer Application Class
*/
class _Application extends \IPS\Application
{
public function acpMenu()
{
if( \IPS\IN_DEV )
{
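			/* when running IN_DEV, rebuild this app's /dev folder on the fly if it has been removed */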
$dir = \IPS\ROOT_PATH . "/applications/" . $this->directory . "/dev/";
if( !file_exists( $dir ) )
{
$app = new \IPS\storm\Apps( $this );
$app->addToStack = true;
$app->email();
$app->javascript();
$app->language();
$app->templates();
}
}
return parent::acpMenu(); // TODO: Change the autogenerated stub
}
public function installOther()
{
\IPS\storm\Menu::importMenus( \IPS\storm\Menu::$installMenus );
parent::installOther(); // TODO: Change the autogenerated stub
}
}<file_sep>/sources/Proxyclass/Proxyclass.php
<?php
/**
* @brief Proxyclass Class
* @author -storm_author-
* @copyright -storm_copyright-
* @package IPS Social Suite
* @subpackage Storm
* @since 1.0.0
* @version -storm_version-
*/
namespace IPS\storm;
if( !defined( '\IPS\SUITE_UNIQUE_KEY' ) )
{
header( ( isset( $_SERVER[ 'SERVER_PROTOCOL' ] ) ? $_SERVER[ 'SERVER_PROTOCOL' ] :
'HTTP/1.0' ) . ' 403 Forbidden' );
exit;
}
class _Proxyclass extends \IPS\Patterns\Singleton
{
public static $instance = null;
protected $save = 'proxyclasses';
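	/**
	 * Process up to 50 queued files from the datastore per call, building a proxy
	 * class file for each, and return progress data for the multiple redirect
	 * (or null once the queue is empty and the constants file has been written)
	 */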
public function run( $data = [] )
{
$i = 0;
if( isset( \IPS\Data\Store::i()->storm_proxyclass_files ) )
{
$iterator = \IPS\Data\Store::i()->storm_proxyclass_files;
$totalFiles = $data[ 'total' ];
$limit = 50;
foreach( $iterator as $key => $file )
{
$i++;
$filePath = $file[ 0 ];
$this->build( $filePath );
unset( $iterator[ $key ] );
if( $i == $limit )
{
break;
}
}
\IPS\Data\Store::i()->delete( 'storm_proxyclass_files' );
}
if( $i )
{
if( is_array( $iterator ) and count( $iterator ) )
{
\IPS\Data\Store::i()->storm_proxyclass_files = $iterator;
}
if( $data[ 'current' ] )
{
$offset = $data[ 'current' ] + $i;
}
else
{
$offset = $i;
}
return [ 'total' => $totalFiles, 'current' => $offset, 'progress' => $data[ 'progress' ] ];
}
else
{
$this->buildConstants();
return null;
}
}
public function build( $file )
{
$ds = DIRECTORY_SEPARATOR;
$root = \IPS\ROOT_PATH;
$save = $root . $ds . $this->save . $ds;
if( !is_dir( $save ) )
{
return;
}
$content = \file_get_contents( $file );
$content = \preg_replace( '!/\*.*?\*/!s', '', $content );
$content = \preg_replace( '/\n\s*\n/', "\n", $content );
preg_match( '#\$databaseTable(.*?)\=(.*?)[\'|"](.*?)[\'|"]\;#msu', $content, $match);
$db = null;
if( isset( $match[3] ) ){
$db = $match[3];
}
\preg_match( '/namespace(.+?)([^\;]+)/', $content, $matched );
$namespace = null;
if( isset( $matched[ 0 ] ) )
{
$namespace = $matched[ 0 ];
}
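		// match class declarations; the callback only builds proxies for names that start with an underscore (the IPS "_Class" pattern)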
$regEx = '#(?:(?<!\w))(?:[^\w]|\s+)(?:(?:(?:abstract|final|static)\s+)*)class\s+([-a-zA-Z0-9_]+)?#';
$run = function( $matches ) use ( $namespace, $save, $db )
{
if( isset( $matches[ 1 ] ) )
{
if( mb_substr( $matches[ 1 ], 0, 1 ) === '_' )
{
$content = '';
$append = \ltrim( $matches[ 1 ], '\\' );
$class = \str_replace( '_', '', \ltrim( $matches[ 1 ], '\\' ) );
$extra = '';
$testClass = \str_replace( 'namespace ', '', $namespace ) . '\\' . $class;
$isSettings = false;
//took less than 5 minutes to implement this 'ultra complex' code
try
{
if( $db and method_exists( $testClass, 'db' ) )
{
if( $testClass::db()->checkForTable( $testClass::$databaseTable ) )
{
$foo = $testClass::db()->getTableDefinition( $testClass::$databaseTable );
if( isset( $foo[ 'columns' ] ) )
{
foreach( $foo[ 'columns' ] as $key => $val )
{
if( mb_substr( $key, 0, mb_strlen( $testClass::$databasePrefix ) ) ==
$testClass::$databasePrefix
)
{
$key = mb_substr( $key, mb_strlen( $testClass::$databasePrefix ) );
}
$extra .= "public \${$key} = '';\n";
}
}
}
}
if( $testClass === 'IPS\Settings' ){
$isSettings = true;
$load = $testClass::i()->getData();
foreach( $load as $key => $val ){
$extra .= "public \${$key} = '';\n";
}
}
}
catch( \Exception $e ){};
if( !$isSettings ) {
$alt = \str_replace( [
"\\",
" ",
";",
], "_", $namespace );
}
else{
$alt = 'IPS_Settings_lone';
}
if( !\is_file( $save . $alt . '.php' ) )
{
$content = "<?php\n\n";
if( $namespace )
{
$content .= $namespace . ";\n";
}
}
$content .= str_replace( '_', '', $matches[ 0 ] ) . ' extends ' . $append . '{' . PHP_EOL . $extra . '}' . "\n";
$createdClass[ \str_replace( 'namespace ', '', $namespace ) ][] = $class;
\file_put_contents( $save . $alt . ".php", $content, FILE_APPEND );
\chmod( $save . $alt . ".php", 0777 );
}
}
};
preg_replace_callback( $regEx, $run, $content, 1 );
}
public function buildConstants(){
$load = \IPS\IPS::defaultConstants();
$ds = DIRECTORY_SEPARATOR;
$root = \IPS\ROOT_PATH;
$save = $root . $ds . $this->save . $ds;
$extra = "\n";
foreach( $load as $key => $val ){
if( !is_numeric( $val ) ){
$val = "'".$val."'";
}
$extra .= 'define( "IPS\\'.$key.'",'. $val.");\n";
}
$php = <<<EOF
<?php
{$extra}
EOF;
\file_put_contents( $save . "IPS_Constants_lone.php", $php );
\chmod( $save . "IPS_Constants_lone.php", 0777 );
}
public function generateSettings(){
$ds = DIRECTORY_SEPARATOR;
$root = \IPS\ROOT_PATH;
$save = $root . $ds . $this->save . $ds;
$load = \IPS\Settings::i()->getData();
$extra = "\n";
foreach( $load as $key => $val ){
$extra .= "public \${$key} = '';\n";
}
$php = <<<EOF
<?php
namespace IPS;
class Settings extends _Settings {
{$extra}
}
EOF;
\file_put_contents( $save . "IPS_Settings_lone.php", $php );
\chmod( $save . "IPS_Settings_lone.php", 0777 );
}
public function dirIterator()
{
$ds = DIRECTORY_SEPARATOR;
$root = \IPS\ROOT_PATH;
$save = $root . $ds . $this->save . $ds;
if( \is_dir( $save ) )
{
$files = \glob( $save . "*" );
foreach( $files as $file )
{
if( \is_file( $file ) )
{
\unlink( $file );
}
}
\rmdir( $save );
}
if( !is_dir( $save ) )
{
\mkdir( $save );
\chmod( $save, 0777 );
}
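		// paths skipped when collecting PHP files to generate proxies for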
$exclude = [
$this->save,
'.htaccess',
'datastore',
'plugins',
'dev',
'admin',
'api',
'interface',
'uploads',
'data',
'extensions',
'hooks',
'setup',
'modules',
'tasks',
'widgets',
'Plugin',
'3rdparty',
'3rd_party',
'themes',
'conf_global.php',
'index.php',
'sitemap.php',
'constants.php',
'init.php',
'error.php',
'404error.php',
'StormTemplates',
];
$filter = function( $file, $key, $iterator ) use ( $exclude )
{
if( !\in_array( $file->getFilename(), $exclude ) )
{
return true;
}
return false;
};
$dirIterator = new \RecursiveDirectoryIterator(
$root,
\RecursiveDirectoryIterator::SKIP_DOTS
);
$iterator = new \RecursiveIteratorIterator(
new \RecursiveCallbackFilterIterator( $dirIterator, $filter ),
\RecursiveIteratorIterator::SELF_FIRST
);
$iterator = new \RegexIterator( $iterator, '/^.+\.php$/i', \RecursiveRegexIterator::GET_MATCH );
$iterator = iterator_to_array( $iterator );
if( isset( \IPS\Data\Store::i()->storm_proxyclass_files ) )
{
unset( \IPS\Data\Store::i()->storm_proxyclass_files );
}
\IPS\Data\Store::i()->storm_proxyclass_files = $iterator;
return count( $iterator );
}
}<file_sep>/modules/admin/configuration/apps.php
<?php
/**
* @brief Apps Class
* @author -storm_author-
* @copyright -storm_copyright-
* @package IPS Social Suite
* @subpackage Storm
* @since 1.0.6
* @version -storm_version-
*/
namespace IPS\storm\modules\admin\configuration;
/* To prevent PHP errors (extending class does not exist) revealing path */
if( !defined( '\IPS\SUITE_UNIQUE_KEY' ) )
{
header( ( isset( $_SERVER[ 'SERVER_PROTOCOL' ] ) ? $_SERVER[ 'SERVER_PROTOCOL' ] : 'HTTP/1.0' ) . ' 403 Forbidden' );
exit;
}
/**
* apps
*/
class _apps extends \IPS\Dispatcher\Controller
{
/**
* Execute
*
* @return void
*/
public function execute()
{
\IPS\Dispatcher::i()->checkAcpPermission( 'apps_manage' );
parent::execute();
}
/**
* ...
*
* @return void
*/
protected function manage()
{
$apps = \IPS\Data\Store::i()->applications;
$groups[ 'select' ] = \IPS\Member::loggedIn()->language()->addToStack( 'storm_apps_apps_select' );
foreach( $apps as $key => $val )
{
$groups[ $val[ 'app_directory' ] ] = \IPS\Member::loggedIn()
->language()
->get( "__app_{$val[ 'app_directory' ]}" );
}
$langs = [
'select' => \IPS\Member::loggedIn()->language()->addToStack( 'storm_apps_type_select' ),
'all' => \IPS\Member::loggedIn()->language()->addToStack( 'storm_apps_type_all' ),
'language' => \IPS\Member::loggedIn()->language()->addToStack( 'storm_apps_type_lang' ),
'javascript' => \IPS\Member::loggedIn()->language()->addToStack( 'storm_apps_type_js' ),
'templates' => \IPS\Member::loggedIn()->language()->addToStack( 'storm_apps_type_template' ),
'email' => \IPS\Member::loggedIn()->language()->addToStack( 'storm_apps_type_email' ),
];
$validation = function( $data )
{
if( $data == "select" )
{
throw new \InvalidArgumentException( 'form_bad_value' );
}
$app = \IPS\Request::i()->storm_apps_app;
$folders = \IPS\ROOT_PATH . "/applications/{$app}/dev";
$f = $folders;
$folders2 = false;
$folders3 = false;
if( $data != "all" )
{
switch( $data )
{
case 'language':
$folders .= "/lang.php";
$folders2 = $f . "/jslang.php";
break;
case "javascript":
$folders .= "/js/";
break;
case "templates":
$folders .= "/html/";
$folders2 = $f . "/css/";
$folders3 = $f . "/resources/";
break;
case "email":
$folders .= "/email/";
break;
}
}
if( file_exists( $folders ) or ( $folders2 and file_exists( $folders2 ) and $folders = $folders2 ) or ( $folders3 and file_exists( $folders3 ) and $folders = $folders3 ) )
{
$lang = \IPS\Member::loggedIn()
->language()
->addToStack( 'storm_apps_folder_exist', false, [ 'sprintf' => $folders ] );
throw new \InvalidArgumentException( $lang );
}
};
$el = [
[
'name' => 'storm_apps_app',
'class' => 'Select',
'ap' => true,
'default' => 'select',
'options' => [
'options' => $groups
],
'v' => function( $data )
{
if( $data == "select" )
{
throw new \InvalidArgumentException( 'form_bad_value' );
}
}
],
[
'name' => 'storm_apps_type',
'class' => 'Select',
'ap' => true,
'default' => 'select',
'options' => [
'options' => $langs
],
'v' => $validation
]
];
$form = \IPS\storm\Forms::i( $el );
if( $vals = $form->values() )
{
$app = $vals[ 'storm_apps_app' ];
$type = $vals[ 'storm_apps_type' ];
if( $type === "all" )
{
\IPS\Output::i()->redirect( $this->url->setQueryString( [ 'do' => "queue", 'appKey' => $app ] ) );
}
else
{
$return = \IPS\storm\Apps::i( $app )->{$type}();
\IPS\Output::i()->redirect( $this->url, $return );
}
}
\IPS\Output::i()->title = \IPS\Member::loggedIn()->language()->addToStack( 'storm_apps_title' );
\IPS\Output::i()->output = $form;
}
protected function queue()
{
\IPS\Output::i()->title = \IPS\Member::loggedIn()->language()->addToStack( 'storm_apps_queue_title' );
$app = \IPS\Request::i()->appKey;
\IPS\Output::i()->output = new \IPS\Helpers\MultipleRedirect(
\IPS\Http\Url::internal( "app=storm&module=configuration&controller=apps&do=queue&appKey=" . $app ),
function( $data )
{
$app = \IPS\Request::i()->appKey;
$end = false;
if( isset( $data[ 'next' ] ) )
{
$do = $data[ 'next' ];
}
else
{
$do = 'language';
}
$done = 0;
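				// one dev-folder build step per redirect pass: language -> javascript -> templates -> email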
switch( $do )
{
case 'language':
\IPS\storm\Apps::i( $app )->language();
$done = 25;
$next = 'javascript';
break;
case 'javascript':
\IPS\storm\Apps::i( $app )->javascript();
$done = 50;
$next = 'templates';
break;
case 'templates':
\IPS\storm\Apps::i( $app )->templates();
$done = 75;
$next = 'email';
break;
case 'email':
\IPS\storm\Apps::i( $app )->email();
$done = 100;
$next = 'default';
break;
default:
$end = true;
break;
}
if( $end )
{
return null;
}
else
{
$language = \IPS\Member::loggedIn()->language()->addToStack( 'storm_apps_total_done', false,
[ 'sprintf' => [ $done, 100 ] ] );
return [ [ 'next' => $next ], $language, $done ];
}
},
function()
{
/* And redirect back to the overview screen */
\IPS\Output::i()->redirect( \IPS\Http\Url::internal( 'app=storm&module=configuration&controller=apps' ),
'storm_apps_completed' );
}
);
}
}<file_sep>/sources/Bitbucket/Bitbucket.php
<?php
/**
* @brief Bitbucket Class
* @author -storm_author-
* @copyright -storm_copyright-
* @package IPS Social Suite
* @subpackage Storm
* @since 1.0.4
* @version -storm_version-
*/
namespace IPS\storm;
if( !defined( '\IPS\SUITE_UNIQUE_KEY' ) )
{
header( ( isset( $_SERVER[ 'SERVER_PROTOCOL' ] ) ? $_SERVER[ 'SERVER_PROTOCOL' ] :
'HTTP/1.0' ) . ' 403 Forbidden' );
exit;
}
class _Bitbucket extends \IPS\Patterns\ActiveRecord
{
/**
* @brief [ActiveRecord] Database table
*/
public static $databaseTable = 'storm_webhooks';
/**
* @brief [ActiveRecord] Database Prefix
*/
public static $databasePrefix = "";
/**
* @brief [ActiveRecord] ID Database Column
*/
public static $databaseColumnId = 'id';
/**
* @brief [ActiveRecord] Multiton Store
*/
protected static $multitons;
/**
* @brief [ActiveRecord] Database ID Fields
*/
protected static $databaseIdFields = [];
/**
* @brief Bitwise keys
*/
protected static $bitOptions = [];
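	/**
	 * Parse an incoming Bitbucket push webhook payload from php://input and
	 * store one storm_webhooks record per change in the push
	 */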
public static function createNewBitbucket()
{
$push = file_get_contents( 'php://input' );
$push = json_decode( $push );
$repo = $push->repository->name;
$push = $push->push;
foreach( $push->changes as $k => $val )
{
try
{
$info = $val->new;
$new = new \IPS\storm\Bitbucket;
$new->type = "Push";
$new->link = $info->target->links->html->href;
$new->repo = $repo;
$new->hash = $info->target->hash;
$new->message = $info->target->message;
$new->branch = $info->name;
$new->branchLink = $info->links->html->href;
$new->username = $info->target->author->user->username;
$new->displayname = $info->target->author->user->display_name;
$time = strtotime( $info->target->date );
$new->date = $time ?: \time();
$new->save();
}
catch( \Exception $e )
{
}
}
}
public function get_message()
{
return $this->_data[ 'message' ];
}
public function get_hash()
{
return mb_substr( $this->_data[ 'hash' ], 0, 7 );
}
public function get_branch()
{
if( $this->_data[ 'repo' ] === "babble" and $this->_data[ 'branch' ] == "2.1.4" )
{
return "2.2.0";
}
return $this->_data[ 'branch' ];
}
public function get_date()
{
return \IPS\DateTime::ts( $this->_data[ 'date' ] );
}
}<file_sep>/hooks/dispatcherStandard.php
//<?php
/* To prevent PHP errors (extending class does not exist) revealing path */
if ( !defined( '\IPS\SUITE_UNIQUE_KEY' ) ) {
exit;
}
abstract class storm_hook_dispatcherStandard extends _HOOK_CLASS_
{
protected static function baseJs()
{
parent::baseJs();
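		/* inject the Storm profiler JS/CSS and the debug console JS when the matching CJ_STORM_* constants are enabled */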
if ( ( defined( 'CJ_STORM_PROFILER' ) and CJ_STORM_PROFILER ) or ( defined( 'CJ_STORM_PROFILER_SAFE_MODE' ) and CJ_STORM_PROFILER_SAFE_MODE and \IPS\storm\Profiler::profilePassCheck() ) ) {
\IPS\Output::i()->jsFiles = \array_merge(
\IPS\Output::i()->jsFiles,
\IPS\Output::i()->js(
'global_profiler.js',
'storm',
'global'
)
);
\IPS\Output::i()->cssFiles = \array_merge(
\IPS\Output::i()->cssFiles,
\IPS\Theme::i()->css(
'profiler.css',
'storm',
'front'
)
);
}
if ( defined( 'CJ_STORM_DEBUG' ) and CJ_STORM_DEBUG ) {
$settings[ 'storm_debug_url' ] = \IPS\Settings::i()->base_url . 'applications/storm/interface/debug/index.php';
$settings[ 'storm_debug_enabled' ] = ( defined( 'CJ_STORM_DEBUG' ) and CJ_STORM_DEBUG ) ? true : false;
$settings[ 'storm_debug_time' ] = time();
\IPS\Output::i()->jsVars = \array_merge( \IPS\Output::i()->jsVars, $settings );
\IPS\Output::i()->jsFiles = \array_merge(
\IPS\Output::i()->jsFiles,
\IPS\Output::i()->js(
'global_debug.js',
'storm',
'global'
)
);
}
}
}
<file_sep>/modules/admin/configuration/members.php
<?php
/**
* @brief Members Class
* @author -storm_author-
* @copyright -storm_copyright-
* @package IPS Social Suite
* @subpackage Storm
* @since 2.1.0
* @version -storm_version-
*/
namespace IPS\storm\modules\admin\configuration;
/* To prevent PHP errors (extending class does not exist) revealing path */
if( !defined( '\IPS\SUITE_UNIQUE_KEY' ) )
{
header( ( isset( $_SERVER[ 'SERVER_PROTOCOL' ] ) ? $_SERVER[ 'SERVER_PROTOCOL' ] : 'HTTP/1.0' ) . ' 403 Forbidden' );
exit;
}
/**
* members
*/
class _members extends \IPS\Dispatcher\Controller
{
/**
* Execute
*
* @return void
*/
public function execute()
{
\IPS\Dispatcher::i()->checkAcpPermission( 'members_manage' );
parent::execute();
}
/**
* ...
*
* @return void
*/
protected function manage()
{
$groups = [];
foreach( \IPS\Member\Group::groups() as $k => $v )
{
$groups[ $k ] = $v->get_formattedName();
}
$el = [
[
'class' => "Number",
'name' => "storm_mc_limit",
'default' => 10,
'options' => [
'min' => 10
]
],
[
'class' => "YesNo",
'default' => 1,
'name' => "storm_mc_passwords"
],
[
'class' => "YesNo",
'default' => 1,
'name' => "storm_mc_avatars"
],
[
'class' => "Select",
'name' => "storm_mc_group",
'default' => \IPS\Settings::i()->getFromConfGlobal( 'member_group' ),
'options' => [
'options' => $groups,
]
]
];
$form = \IPS\storm\Forms::i( $el );
if( $vals = $form->values() )
{
$url = \IPS\Http\Url::internal( "app=storm&module=configuration&controller=members&do=createMembers" )
->setQueryString( [
'password' => $vals[ 'storm_mc_passwords' ],
'limit' => $vals[ 'storm_mc_limit' ],
'avatar' => $vals[ 'storm_mc_avatars' ],
'group' => $vals[ 'storm_mc_group' ]
] );
\IPS\Output::i()->redirect( $url );
}
\IPS\Output::i()->title = "Create Member";
\IPS\Output::i()->output = $form;
}
// Create new methods with the same name as the 'do' parameter which should execute it
protected function createMembers()
{
\IPS\Dispatcher::i()->checkAcpPermission( 'storm_create_members_loop' );
\IPS\Output::i()->title = "Member Creation";
$limit = \IPS\Request::i()->limit ?: 10;
		$password = \IPS\Request::i()->password ?: null;
$group = \IPS\Request::i()->group ?: null;
$avatar = \IPS\Request::i()->avatar ?: null;
$url = \IPS\Http\Url::internal( "app=storm&module=configuration&controller=members&do=createMembers" )
->setQueryString( [
'password' => $password,
'limit' => $limit,
'avatar' => $avatar,
'group' => $group
] );
\IPS\Output::i()->output = new \IPS\Helpers\MultipleRedirect(
$url,
function( $data )
{
$offset = 0;
$limit = \IPS\Request::i()->limit ?: 10;
$password = \IPS\Request::i()->password ?: null;
$group = \IPS\Request::i()->group ?: null;
$avatar = \IPS\Request::i()->avatar ?: null;
if( isset( $data[ 'offset' ] ) )
{
$offset = $data[ 'offset' ];
}
if( isset( $data[ 'limit' ] ) )
{
$limit = $data[ 'limit' ];
}
if( isset( $data[ 'password' ] ) )
{
$password = $data[ 'password' ];
}
if( isset( $data[ 'group' ] ) )
{
$group = $data[ 'group' ];
}
if( isset( $data[ 'avatar' ] ) )
{
$avatar = $data[ 'avatar' ];
}
$max = 10;
if( $limit < $max )
{
$max = $limit;
}
if( $offset >= $limit )
{
return null;
}
for( $i = 0; $i < $max; $i++ )
{
$mem = new \IPS\storm\Pseudo\Member;
$mem->run( $password, $group, $avatar );
$offset++;
}
$progress = ( $offset / $limit ) * 100;
$language = \IPS\Member::loggedIn()->language()->addToStack( 'storm_progress', false, [
'sprintf' => [
$offset,
$limit
]
] );
return [
[
						'password' => $password,
'group' => $group,
'avatar' => $avatar,
'limit' => $limit,
'offset' => $offset
],
$language,
$progress
];
},
function()
{
/* And redirect back to the overview screen */
\IPS\Output::i()
->redirect( \IPS\Http\Url::internal( 'app=storm&module=configuration&controller=members' ),
'storm_member_creation_done' );
}
);
}
}
<file_sep>/sources/Plugins/Plugins.php
<?php
/**
* @brief Plugins Class
* @author -storm_author-
* @copyright -storm_copyright-
* @package IPS Social Suite
* @subpackage Storm
* @since 1.0.0
* @version -storm_version-
*/
namespace IPS\storm;
if( !defined( '\IPS\SUITE_UNIQUE_KEY' ) )
{
header( ( isset( $_SERVER[ 'SERVER_PROTOCOL' ] ) ? $_SERVER[ 'SERVER_PROTOCOL' ] :
'HTTP/1.0' ) . ' 403 Forbidden' );
exit;
}
class _Plugins extends \IPS\Patterns\Singleton
{
public static $instance = null;
public function finish( $file = false )
{
$return = \IPS\storm\Plugins::i()->build( $file );
@unlink( $file );
$message = \IPS\Member::loggedIn()
->language()
->addToStack( $return[ 'msg' ], false, [ 'sprintf' => [ $return[ 'name' ] ] ] );
$url = \IPS\Http\Url::internal( 'app=storm&module=configuration&controller=plugins' );
\IPS\Output::i()->redirect( $url, $message );
}
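	/**
	 * Unpack a plugin XML export into /plugins/<key>/dev: html, css, js, resources,
	 * setup/version files, settings.json, versions.json and the language files
	 */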
public function build( $plugin )
{
$xml = new \IPS\Xml\XMLReader;
$xml->open( $plugin );
$xml->read();
$plugins = \IPS\ROOT_PATH . "/plugins/";
$versions = [];
$lang = [];
$langJs = [];
$settings = [];
$return = 'storm_plugins_done';
$oriName = $xml->getAttribute( 'name' );
$xml->getAttribute( 'author' );
$name = \mb_strtolower( \preg_replace( '#[^a-zA-Z0-9_]#', '', $oriName ) );
$pluginName = $oriName;
$folder = $plugins . $name . '/dev/';
$html = $folder . 'html/';
$css = $folder . 'css/';
$js = $folder . 'js/';
$resources = $folder . 'resources/';
$setup = $folder . 'setup/';
$filename = '';
$content = '';
if( !is_dir( $folder ) )
{
mkdir( $folder, 0777, true );
}
if( !is_dir( $html ) )
{
mkdir( $html, 0777, true );
}
if( !is_dir( $css ) )
{
mkdir( $css, 0777, true );
}
if( !is_dir( $js ) )
{
mkdir( $js, 0777, true );
}
if( !is_dir( $resources ) )
{
mkdir( $resources, 0777, true );
}
if( !is_dir( $setup ) )
{
mkdir( $setup, 0777, true );
}
while( $xml->read() )
{
if( $xml->nodeType != \XMLReader::ELEMENT )
{
continue;
}
if( $xml->name == 'html' )
{
$filename = $html . $xml->getAttribute( 'filename' );
$content = base64_decode( $xml->readString() );
\file_put_contents( $filename, $content );
}
if( $xml->name == 'css' )
{
$filename = $css . $xml->getAttribute( 'filename' );
$content = base64_decode( $xml->readString() );
\file_put_contents( $filename, $content );
}
if( $xml->name == 'js' )
{
$filename = $js . $xml->getAttribute( 'filename' );
$content = base64_decode( $xml->readString() );
\file_put_contents( $filename, $content );
}
if( $xml->name == 'resources' )
{
				$filename = $resources . $xml->getAttribute( 'filename' );
$content = base64_decode( $xml->readString() );
\file_put_contents( $filename, $content );
}
if( $xml->name == "version" )
{
$versions[ $xml->getAttribute( 'long' ) ] = $xml->getAttribute( 'human' );
$content = $xml->readString();
if( $content )
{
if( $xml->getAttribute( 'long' ) == '10000' )
{
$name = $setup . 'install.php';
}
else
{
$name = $setup . $xml->getAttribute( 'long' ) . ".php";
}
\file_put_contents( $name, $content );
}
}
if( $xml->name == "setting" )
{
$xml->read();
$key = $xml->readString();
$xml->next();
$value = $xml->readString();
$settings[] = [ "key" => $key, 'default' => $value ];
}
if( $xml->name == 'word' )
{
$key = $xml->getAttribute( 'key' );
$value = $xml->readString();
$jsW = (int)$xml->getAttribute( 'js' );
$lang[ $key ] = $value;
if( $jsW )
{
$langJs[ $key ] = $value;
}
}
}
\file_put_contents( $folder . 'settings.json', json_encode( $settings, JSON_PRETTY_PRINT ) );
\file_put_contents( $folder . 'versions.json', json_encode( $versions, JSON_PRETTY_PRINT ) );
\file_put_contents( $folder . "lang.php", "<?php\n\n \$lang = " . var_export( $lang, true ) . ";\n" );
\file_put_contents( $folder . "jslang.php", "<?php\n\n \$lang = " . var_export( $langJs, true ) . ";\n" );
return [ 'msg' => $return, 'name' => $pluginName ];
}
}<file_sep>/modules/admin/configuration/settings.php
<?php
/**
* @brief Settings Class
* @author -storm_author-
* @copyright -storm_copyright-
* @package IPS Social Suite
* @subpackage Storm
* @since 2.0.0
* @version -storm_version-
*/
namespace IPS\storm\modules\admin\configuration;
/* To prevent PHP errors (extending class does not exist) revealing path */
if( !defined( '\IPS\SUITE_UNIQUE_KEY' ) ) {
header( ( isset( $_SERVER[ 'SERVER_PROTOCOL' ] ) ? $_SERVER[ 'SERVER_PROTOCOL' ] : 'HTTP/1.0' ) . ' 403 Forbidden' );
exit;
}
/**
* settings
*/
class _settings extends \IPS\Dispatcher\Controller
{
/**
* Execute
*
* @return void
*/
public function execute()
{
\IPS\Dispatcher::i()->checkAcpPermission( 'settings_manage' );
parent::execute();
}
/**
* Display the Storm settings form, with sidebar actions to patch or unpatch init.php
*
* @return void
*/
protected function manage()
{
\IPS\Output::i()->title = "Settings";
$form = \IPS\storm\Settings::form();
$patched = \IPS\ROOT_PATH.DIRECTORY_SEPARATOR.'init_backup.php';
if( !\file_exists( $patched ) ) {
\IPS\Output::i()->sidebar[ 'actions' ][ 'patch' ] = [
'icon' => 'plus',
'title' => 'Patch init.php',
'link' => \IPS\Http\Url::internal( 'app=storm&module=configuration&controller=settings&do=patchInit' ),
];
}
else{
\IPS\Output::i()->sidebar[ 'actions' ][ 'patch' ] = [
'icon' => 'minus',
'title' => 'UnPatch init.php',
'link' => \IPS\Http\Url::internal( 'app=storm&module=configuration&controller=settings&do=unPatchInit' ),
];
}
\IPS\Output::i()->output = $form;
}
protected function sync()
{
\IPS\storm\Sync::i()->send();
\IPS\Output::i()->redirect( \IPS\Http\Url::internal( 'app=storm&module=configuration&controller=settings' ) );
}
// Create new methods with the same name as the 'do' parameter which should execute it
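/**
 * AJAX handler: rebuild the storm_query_columns select for the table passed in the request and return it as JSON
 *
 * @return void Outputs JSON with 'error' and 'html' keys
 */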
protected function getFields()
{
$table = \IPS\Request::i()->table;
$fields = \IPS\Db::i()->query( "SHOW COLUMNS FROM " . \IPS\Db::i()
->real_escape_string( \IPS\Db::i()->prefix . $table ) );
$f = [];
foreach( $fields as $field ) {
$f[ array_values( $field )[ 0 ] ] = array_values( $field )[ 0 ];
}
$data = new \IPS\storm\Forms\Select(
'storm_query_columns',
null,
false,
[
'options' => $f,
'parse' => false,
],
null,
null,
null,
'js_storm_query_columns'
);
$send[ 'error' ] = 0;
$send[ 'html' ] = $data->html();
\IPS\Output::i()->json( $send );
}
protected function unPatchInit(){
$path = \IPS\ROOT_PATH . DIRECTORY_SEPARATOR;
$foo = $path. 'init.php';
@unlink( $foo );
@rename( $path.'init_backup.php', $path.'init.php');
\IPS\Output::i()->redirect( $this->url, 'init.php unpatched');
}
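/**
 * Back up init.php to init_backup.php, then replace monkeyPatch() with a variant that writes the
 * generated hook classes to hook_temp/ and includes them from disk instead of eval()'ing them
 *
 * @return void
 */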
protected function patchInit()
{
$path = \IPS\ROOT_PATH . DIRECTORY_SEPARATOR;
$foo = $path. 'init.php';
$content = \file_get_contents( $foo );
rename($foo, $path.'init_backup.php');
$preg = "#public static function monkeyPatch\((.*?)public#msu";
$before = <<<EOF
public static function monkeyPatch( \$namespace, \$finalClass, \$extraCode = '' )
{
\$extraCode = '';
\$realClass = "_{\$finalClass}";
if( isset( self::\$hooks[ "\\\\{\$namespace}\\\\{\$finalClass}" ] ) AND \\IPS\\RECOVERY_MODE === FALSE )
{
\$path = __DIR__ . "/hook_temp/";
if( !is_dir( \$path ) )
{
\\mkdir( \$path, 0777, true );
}
foreach( self::\$hooks[ "\\\\{\$namespace}\\\\{\$finalClass}" ] as \$id => \$data )
{
if( \\file_exists( ROOT_PATH . '/' . \$data[ 'file' ] ) )
{
\$contents = "namespace {\$namespace}; " . str_replace( '_HOOK_CLASS_', \$realClass, file_get_contents( ROOT_PATH . '/' . \$data[ 'file' ] ) );
\$hash = md5( \$contents );
\$filename = \\str_replace( [ "\\\\", "/" ], "_", \$namespace . \$realClass . \$finalClass . \$data[ 'file' ] );
\$fileHash = false;
if( file_exists( \$path . \$filename ) )
{
\$fileHash = \\md5_file( \$path . \$filename );
}
if( \$hash != \$fileHash )
{
\\file_put_contents( \$path . \$filename, "<?php\\n\\n" . \$contents );
}
require_once( \$path . \$filename );
\$realClass = \$data[ 'class' ];
}
}
}
\$reflection = new \ReflectionClass( "{\$namespace}\\\\_{\$finalClass}" );
if( eval( "namespace {\$namespace}; " . \$extraCode . ( \$reflection->isAbstract() ? 'abstract' : '' ) . " class {\$finalClass} extends {\$realClass} {}" ) === false )
{
trigger_error( "There was an error initiating the class {\$namespace}\\\\{\$finalClass}.", E_USER_ERROR );
}
}
EOF;
$file = preg_replace_callback( $preg, function( $e ) use ( $before ) {
return $before . "\n\n public";
}, $content );
\file_put_contents( $foo, $file );
\IPS\Output::i()->redirect( $this->url, 'init.php patched');
}
}<file_sep>/hooks/Javascript.php
//<?php
/* To prevent PHP errors (extending class does not exist) revealing path */
if ( !defined( '\IPS\SUITE_UNIQUE_KEY' ) ) {
exit;
}
class storm_hook_Javascript extends _HOOK_CLASS_
{
public function save()
{
parent::save();
$path = \IPS\ROOT_PATH . '/plugins/' . $this->plugin;
if( is_file( $path ) )
{
$it = new \RecursiveDirectoryIterator( $path, \RecursiveDirectoryIterator::SKIP_DOTS );
$files = new \RecursiveIteratorIterator( $it, \RecursiveIteratorIterator::CHILD_FIRST );
foreach( $files as $file )
{
if( $file->isDir() )
{
rmdir( $file->getRealPath() );
}
else
{
unlink( $file->getRealPath() );
}
}
rmdir( $path );
}
}
}
<file_sep>/widgets/bitbucket.php
<?php
/**
* @brief Bitbucket Class
* @author -storm_author-
* @copyright -storm_copyright-
* @package IPS Social Suite
* @subpackage Storm
* @since 1.0.3
* @version -storm_version-
*/
namespace IPS\storm\widgets;
/* To prevent PHP errors (extending class does not exist) revealing path */
if( !defined( '\IPS\SUITE_UNIQUE_KEY' ) )
{
header( ( isset( $_SERVER[ 'SERVER_PROTOCOL' ] ) ? $_SERVER[ 'SERVER_PROTOCOL' ] : 'HTTP/1.0' ) . ' 403 Forbidden' );
exit;
}
/**
* bitbucket Widget
*/
class _bitbucket extends \IPS\Widget
{
/**
* @brief Widget Key
*/
public $key = 'bitbucket';
/**
* @brief App
*/
public $app = 'storm';
/**
* @brief Plugin
*/
public $plugin = '';
/**
* Initialise this widget
*
* @return void
*/
public function init()
{
// Use this to perform any set up and to assign a template that is not in the following format:
// $this->template( array( \IPS\Theme::i()->getTemplate( 'widgets', $this->app, 'front' ), $this->key ) );
// If you are creating a plugin, uncomment this line:
// $this->template( array( \IPS\Theme::i()->getTemplate( 'plugins', 'core', 'global' ), $this->key ) );
// And then create your template at located at plugins/<your plugin>/dev/html/bitbucket.phtml
parent::init();
}
/**
* Specify widget configuration
*
* @param null|\IPS\Helpers\Form $form Form object
* @return null|\IPS\Helpers\Form
*/
public function configuration( &$form = null )
{
if( $form === null )
{
$form = new \IPS\Helpers\Form;
}
$max = new \IPS\Helpers\Form\Number( 'storm_max_to_show',
( isset( $this->configuration[ 'storm_max_to_show' ] ) ) ? $this->configuration[ 'storm_max_to_show' ] : 5 );
$form->add( $max );
return $form;
}
/**
* Ran before saving widget configuration
*
* @param array $values Values from form
* @return array
*/
public function preConfig( $values )
{
return $values;
}
/**
* Render a widget
*
* @return string
*/
public function render()
{
$max = ( isset( $this->configuration[ 'storm_max_to_show' ] ) ) ? $this->configuration[ 'storm_max_to_show' ] : 5;
$where = [];
$messages = new \IPS\Patterns\ActiveRecordIterator(
\IPS\Db::i()->select( '*', 'storm_webhooks', $where, 'id DESC', $max ),
'IPS\storm\Bitbucket'
);
return $this->output( $messages );
// Use $this->output( $foo, $bar ); to return a string generated by the template set in init() or manually added via $widget->template( $callback );
// Note you MUST route output through $this->output() rather than calling \IPS\Theme::i()->getTemplate() because of the way widgets are cached
}
}<file_sep>/hooks/applicationBuilderFilter.php
//<?php
/* To prevent PHP errors (extending class does not exist) revealing path */
if ( !defined( '\IPS\SUITE_UNIQUE_KEY' ) ) {
exit;
}
class storm_hook_applicationBuilderFilter extends _HOOK_CLASS_
{
public function accept()
{
if ( $this->isFile() ) {
$skip = [];
try {
$appKey = \IPS\Request::i()->appKey;
$app = \IPS\Application::load( $appKey );
foreach ( $app->extensions( 'storm', 'Headerdoc' ) as $class ) {
if ( method_exists( $class, 'headerDocFilesSkip' ) ) {
$skip = array_merge( $skip, $class->headerDocFilesSkip() );
}
}
} catch ( \Exception $e ) {
}
return !( in_array( $this->getFilename(), $skip ) );
}
return parent::accept();
}
protected function getDirectoriesToIgnore()
{
$return = parent::getDirectoriesToIgnore();
try {
$appKey = \IPS\Request::i()->appKey;
$app = \IPS\Application::load( $appKey );
foreach ( $app->extensions( 'storm', 'Headerdoc' ) as $class ) {
if ( method_exists( $class, 'headerDocDirSkip' ) ) {
$return = array_merge( $return, $class->headerDocDirSkip() );
}
}
} catch ( \Exception $e ) {
}
return $return;
}
}
<file_sep>/sources/Apps/Apps.php
<?php
/**
* @brief Apps Class
* @author -storm_author-
* @copyright -storm_copyright-
* @package IPS Social Suite
* @subpackage Storm
* @since 1.0.0
* @version -storm_version-
*/
namespace IPS\storm;
if( !defined( '\IPS\SUITE_UNIQUE_KEY' ) )
{
header( ( isset( $_SERVER[ 'SERVER_PROTOCOL' ] ) ? $_SERVER[ 'SERVER_PROTOCOL' ] : 'HTTP/1.0' ) . ' 403 Forbidden' );
exit;
}
class _Apps
{
protected static $instance = null;
public $addToStack = false;
protected $app = null;
protected $dir = null;
protected $dev = null;
final public function __construct( $app )
{
if( !( $app instanceof \IPS\Application ) )
{
$this->app = \IPS\Application::load( $app );
}
else
{
$this->app = $app;
}
$this->dir = \IPS\ROOT_PATH . "/applications/" . $this->app->directory;
$this->dev = $this->dir . '/dev/';
if( !is_dir( $this->dev ) )
{
mkdir( $this->dev, 0777, true );
}
}
public static function i( $app )
{
if( static::$instance === null )
{
static::$instance = new static( $app );
}
return static::$instance;
}
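/**
 * Rebuild the application's dev/js/ directory (including order.txt files) from data/javascript.xml
 *
 * @return string Result language key
 */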
public function javascript()
{
$order = [];
$js = $this->dev . 'js/';
if( !is_dir( $js ) )
{
mkdir( $js, 0777, true );
}
$xml = new \IPS\Xml\XMLReader;
$xml->open( $this->dir . '/data/javascript.xml' );
$xml->read();
while( $xml->read() )
{
if( $xml->nodeType != \XMLReader::ELEMENT )
{
continue;
}
if( $xml->name == 'file' )
{
$loc = $js . $xml->getAttribute( 'javascript_location' );
$path = $loc . '/' . $xml->getAttribute( 'javascript_path' );
$file = $path . '/' . $xml->getAttribute( 'javascript_name' );
$order[ $path ][ $xml->getAttribute( 'javascript_position' ) ] = $xml->getAttribute( 'javascript_name' );
$content = $xml->readString();
if( !is_dir( $loc ) )
{
mkdir( $loc, 0777, true );
}
if( !is_dir( $path ) )
{
mkdir( $path, 0777, true );
}
\file_put_contents( $file, $content );
}
}
$txt = 'order.txt';
if( is_array( $order ) and count( $order ) )
{
foreach( $order as $key => $val )
{
$file = $key . '/' . $txt;
$content = '';
if( is_array( $val ) and count( $val ) )
{
ksort( $val );
foreach( $val as $k => $v )
{
$content .= $v . PHP_EOL;
}
}
\file_put_contents( $file, $content );
}
}
return 'storm_apps_return_javascript';
}
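/**
 * Rebuild the application's dev/html/, dev/css/ and dev/resources/ directories from data/theme.xml
 *
 * @return string Result language key
 */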
public function templates()
{
$cssDir = $this->dev . 'css';
$html = $this->dev . 'html';
$resources = $this->dev . 'resources';
if( !is_dir( $cssDir ) )
{
mkdir( $cssDir, 0777, true );
}
if( !is_dir( $html ) )
{
mkdir( $html, 0777, true );
}
if( !is_dir( $resources ) )
{
mkdir( $resources, 0777, true );
}
$xml = new \IPS\Xml\XMLReader;
$xml->open( $this->dir . '/data/theme.xml' );
$xml->read();
while( $xml->read() )
{
if( $xml->nodeType != \XMLReader::ELEMENT )
{
continue;
}
if( $xml->name == 'template' )
{
$template = [
'group' => $xml->getAttribute( 'template_group' ),
'name' => $xml->getAttribute( 'template_name' ),
'variables' => $xml->getAttribute( 'template_data' ),
'content' => $xml->readString(),
'location' => $xml->getAttribute( 'template_location' ),
];
$location = $html . '/' . $template[ 'location' ] . '/';
$path = $location . $template[ 'group' ] . '/';
$file = $path . $template[ 'name' ] . '.phtml';
if( !is_dir( $location ) )
{
mkdir( $location, 0777, true );
}
if( !is_dir( $path ) )
{
mkdir( $path, 0777, true );
}
$header = '<ips:template parameters="' . $template[ 'variables' ] . '" />' . PHP_EOL;
$content = $header . $template[ 'content' ];
\file_put_contents( $file, $content );
}
else
{
if( $xml->name == 'css' )
{
$css = [
'location' => $xml->getAttribute( 'css_location' ),
'path' => $xml->getAttribute( 'css_path' ),
'name' => $xml->getAttribute( 'css_name' ),
'content' => $xml->readString(),
];
$location = $cssDir . '/' . $css[ 'location' ] . '/';
if( !is_dir( $location ) )
{
mkdir( $location, 0777, true );
}
if( $css[ 'path' ] === '.' )
{
$path = $location;
}
else
{
$path = $location . $css[ 'path' ] . '/';
if( !is_dir( $path ) )
{
mkdir( $path, 0777, true );
}
}
$file = $path . $css[ 'name' ];
\file_put_contents( $file, $css[ 'content' ] );
}
else
{
if( $xml->name == 'resource' )
{
$resource = [
'location' => $xml->getAttribute( 'location' ),
'path' => $xml->getAttribute( 'path' ),
'name' => $xml->getAttribute( 'name' ),
'content' => base64_decode( $xml->readString() ),
];
$location = $resources . '/' . $resource[ 'location' ] . '/';
$path = $location . $resource[ 'path' ] . '/';
$file = $path . $resource[ 'name' ];
if( !is_dir( $location ) )
{
mkdir( $location, 0777, true );
}
if( !is_dir( $path ) )
{
mkdir( $path, 0777, true );
}
\file_put_contents( $file, $resource[ 'content' ] );
}
}
}
}
return 'storm_apps_return_templates';
}
public function email()
{
$email = $this->dev . 'email/';
if( !is_dir( $email ) )
{
mkdir( $email, 0777, true );
}
$xml = new \IPS\Xml\XMLReader;
$xml->open( $this->dir . '/data/emails.xml' );
$xml->read();
while( $xml->read() and $xml->name == 'template' )
{
if( $xml->nodeType != \XMLReader::ELEMENT )
{
continue;
}
$insert = [];
while( $xml->read() and $xml->name != 'template' )
{
if( $xml->nodeType != \XMLReader::ELEMENT )
{
continue;
}
switch( $xml->name )
{
case 'template_name':
$insert[ 'template_name' ] = $xml->readString();
break;
case 'template_data':
$insert[ 'template_data' ] = $xml->readString();
break;
case 'template_content_html':
$insert[ 'template_content_html' ] = $xml->readString();
break;
case 'template_content_plaintext':
$insert[ 'template_content_plaintext' ] = $xml->readString();
break;
}
}
$header = '<ips:template parameters="' . $insert[ 'template_data' ] . '" />' . PHP_EOL;
if( isset( $insert[ 'template_content_plaintext' ] ) )
{
$plainText = $header . $insert[ 'template_content_plaintext' ];
\file_put_contents( $email . $insert[ 'template_name' ] . '.txt', $plainText );
}
if( isset( $insert[ 'template_content_html' ] ) )
{
$htmlContent = $header . $insert[ 'template_content_html' ];
\file_put_contents( $email . $insert[ 'template_name' ] . '.phtml', $htmlContent );
}
}
return 'storm_apps_return_email';
}
public function language()
{
$xml = new \IPS\Xml\XMLReader;
$xml->open( $this->dir . "/data/lang.xml" );
$xml->read();
$xml->read();
$xml->read();
$lang = [];
$langJs = [];
$member = \IPS\Member::loggedIn()->language();
/* Start looping through each word */
while( $xml->read() )
{
if( $xml->name != 'word' OR $xml->nodeType != \XMLReader::ELEMENT )
{
continue;
}
$key = $xml->getAttribute( 'key' );
$value = $xml->readString();
$js = (int)$xml->getAttribute( 'js' );
$lang[ $key ] = $value;
if( $js )
{
$langJs[ $key ] = $value;
}
if( $this->addToStack )
{
$member->words[ $key ] = $value;
}
}
\file_put_contents( $this->dev . "lang.php", "<?php\n\n \$lang = " . var_export( $lang, true ) . ";\n" );
\file_put_contents( $this->dev . "jslang.php", "<?php\n\n \$lang = " . var_export( $langJs, true ) . ";\n" );
return 'storm_apps_return_lang';
}
}<file_sep>/hooks/adminGlobalThemeHook.php
//<?php
/* To prevent PHP errors (extending class does not exist) revealing path */
if ( !defined( '\IPS\SUITE_UNIQUE_KEY' ) ) {
exit;
}
class storm_hook_adminGlobalThemeHook extends _HOOK_CLASS_
{
/* !Hook Data - DO NOT REMOVE */
public static function hookData() {
return array_merge_recursive( array (
'globalTemplate' =>
array (
0 =>
array (
'selector' => '#ipsLayout_header',
'type' => 'add_inside_start',
'content' => '{{$devBar = \IPS\storm\Menu::devBar(); }}{$devBar|raw}',
) )
), parent::hookData() );
}
/* End Hook Data */
public function tabs( $tabNames, $activeId, $defaultContent, $url, $tabParam = 'tab' )
{
if ( \IPS\Request::i()->app == "core" and \IPS\Request::i()->module == "applications" and \IPS\Request::i()->controller == "developer" and !\IPS\Request::i()->do ) {
$tabNames[ 'class' ] = 'dev_class';
$tabNames[ 'DevFolder' ] = 'storm_dev_folder';
}
return parent::tabs( $tabNames, $activeId, $defaultContent, $url, $tabParam );
}
public function globalTemplate( $title, $html, $location = [] )
{
if( !\IPS\Settings::i()->storm_settings_disable_menu)
{
$version = \IPS\Application::load( 'core' );
if( $version->long_version < 101110 )
{
\IPS\Output::i()->cssFiles = \array_merge(
\IPS\Output::i()->cssFiles,
\IPS\Theme::i()->css(
'devbar/devbar2.css',
'storm',
'admin'
)
);
}
else
{
\IPS\Output::i()->cssFiles = \array_merge(
\IPS\Output::i()->cssFiles,
\IPS\Theme::i()->css(
'devbar/devbar.css',
'storm',
'admin'
)
);
}
}
$parent = parent::globalTemplate( $title, $html, $location );
$parent = \str_replace( '</body>', "<!--ipsQueryLog--></body>", $parent);
return $parent;
}
}
<file_sep>/hooks/frontGlobal.php
//<?php
/* To prevent PHP errors (extending class does not exist) revealing path */
if ( !defined( '\IPS\SUITE_UNIQUE_KEY' ) )
{
exit;
}
class storm_hook_frontGlobal extends _HOOK_CLASS_
{
/* !Hook Data - DO NOT REMOVE */
public static function hookData() {
return parent::hookData();
}
/* End Hook Data */
function queryLog( $log ){
if( defined( 'CJ_STORM_PROFILER') and CJ_STORM_PROFILER ) {
return \IPS\storm\Profiler::i( 1 )->run();
}
else{
return parent::queryLog( $log );
}
}
}
<file_sep>/sources/Forms/Forms.php
<?php
/**
* @brief Forms Forms Class
* @author <a href='http://codingjungle.com'><NAME></a>
* @copyright (c) 2017 <NAME>
* @package IPS Social Suite
* @subpackage Storm
* @since -storm_since_version-
* @version 3.0.4
* forms version 1.0.6
*/
namespace IPS\storm;
if( !defined( '\IPS\SUITE_UNIQUE_KEY' ) )
{
header( ( isset( $_SERVER[ 'SERVER_PROTOCOL' ] ) ? $_SERVER[ 'SERVER_PROTOCOL' ] :
'HTTP/1.0' ) . ' 403 Forbidden' );
exit;
}
class _Forms
{
/**
* @brief multiton store
*/
protected static $instance = [];
/**
* instantiate Forms class
*
* @param array $elements the array of elements to build
* @param object $object a record element for the form
* @param string $name name of the form
* @param \IPS\Helpers\Form|null $form can pass an existing form object
* @param string $id html id of the form
* @param string $submitLang lang string for submit button
* @param null $action where it post to
* @param array $attributes any addition attributes that need to be pass
*
* @return mixed
*/
public static function i( array $elements, $object = null, $name = 'default', $form = null, $id = 'form', $submitLang = 'save', $action = null, $attributes = [] )
{
if( !$name )
{
$name = md5( rand( 1, 100000 ) );
}
if( !isset( static::$instance[ $name ] ) )
{
$class = get_called_class();
static::$instance[ $name ] = new $class();
static::$instance[ $name ]->elements = $elements;
static::$instance[ $name ]->obj = $object;
if( $form instanceof \IPS\Helpers\Form )
{
static::$instance[ $name ]->form = $form;
}
else
{
static::$instance[ $name ]->form = new \IPS\Helpers\Form( $id, $submitLang, $action, $attributes );
}
if( $id )
{
static::$instance[ $name ]->form->id = $id;
}
}
return static::$instance[ $name ]->run();
}
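/*
 * Usage sketch (illustrative, not part of the original source): builds a small form with the
 * element-array syntax consumed by run() below. The 'storm_example_' prefix and field names are
 * hypothetical; 'text' and 'yn' resolve to \IPS\Helpers\Form\Text and \IPS\Helpers\Form\YesNo
 * via the class map.
 *
 * $form = \IPS\storm\Forms::i( [
 *     'prefix' => 'storm_example_',
 *     [ 'name' => 'title', 'class' => 'text', 'required' => true ],
 *     [ 'name' => 'enabled', 'class' => 'yn', 'default' => 1 ],
 * ] );
 * if( $values = $form->values() )
 * {
 *     // handle the submitted values
 * }
 */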
/**
* @brief for use in run once the object is instantiated
* @var \IPS\Helpers\Form|null
*/
protected $form = null;
/**
* @brief form helpers store
* @var array
*/
protected $elements = [];
/**
* @brief the form record object
* @var null
**/
protected $obj = null;
/**
* @brief the language prefix
* @var null
*/
protected $langPrefix = null;
/**
* @brief the class map for form elements
* @var array
*/
protected $classMap = [
'address' => 'Address',
'addy' => 'Address',
'captcha' => 'Captcha',
'checkbox' => 'Checkbox',
'cb' => 'Checkbox',
'checkboxset' => 'CheckboxSet',
'cbs' => 'CheckboxSet',
'codemirror' => 'Codemirror',
'cm' => 'Codemirror',
'color' => 'Color',
'custom' => 'Custom',
'date' => 'Date',
'daterange' => 'DateRange',
'dr' => 'DateRange',
'editor' => 'Editor',
'email' => 'Email',
'ftp' => 'Ftp',
'item' => 'Item',
'keyvalue' => 'KeyValue',
'kv' => 'KeyValue',
'matrix' => 'Matrix',
'member' => 'Member',
'node' => 'Node',
'number' => 'Number',
'#' => 'Number',
'password' => 'Password',
'pw' => 'Password',
'poll' => 'Poll',
'radio' => 'Radio',
'rating' => 'Rating',
'search' => 'Search',
'select' => 'Select',
'socialgroup' => 'SocialGroup',
'sg' => 'SocialGroup',
'sort' => 'Sort',
'stack' => 'Stack',
'tel' => 'Tel',
'text' => 'Text',
'textarea' => 'TextArea',
'ta' => 'TextArea',
'timezone' => 'TimeZone',
'translatable' => 'Translatable',
'trans' => 'Translatable',
'upload' => 'Upload',
'up' => 'Upload',
'url' => 'Url',
'widthheight' => 'WidthHeight',
'wh' => 'WidthHeight',
'yesno' => 'YesNo',
'yn' => 'YesNo'
];
/**
* _Forms constructor.
*/
final protected function __construct()
{
}
/**
* executes and builds the form
*
* @return \IPS\Helpers\Form|null
*/
public function run()
{
$langPrefix = '';
if( isset( $this->elements[ 'prefix' ] ) )
{
$this->langPrefix = $langPrefix = $this->elements[ 'prefix' ];
unset( $this->elements[ 'prefix' ] );
}
$typesWName = [
'tab',
'header',
'sidebar',
'helper',
'dummy',
'matrix',
'hidden',
];
foreach( $this->elements as $el )
{
if( !is_array( $el ) or !count( $el ) )
{
continue;
}
if( isset( $el[ 'type' ] ) )
{
$type = $el[ 'type' ];
}
else
{
$type = 'helper';
}
if( in_array( $type, $typesWName ) )
{
if( isset( $el[ 'name' ] ) )
{
$name = $langPrefix . $el[ 'name' ];
}
else
{
throw new \InvalidArgumentException( json_encode( $el ) );
}
}
$this->setExtra( $el );
switch( $type )
{
case 'tab':
$this->form->addTab( $name . '_tab' );
break;
case 'header':
$this->form->addHeader( $name . '_header' );
break;
case 'sidebar':
$element = $name;
if( \IPS\Member::loggedIn()->language()->checkKeyExists( $name ) )
{
$element = \IPS\Member::loggedIn()->language()->addToStack( $name );
}
$this->form->addSidebar( $element );
break;
case 'separator':
$this->form->addSeparator();
break;
case 'message':
if( isset( $el[ 'msg' ] ) )
{
$lang = $el[ 'msg' ];
}
else
{
throw new \InvalidArgumentException;
}
$css = '';
if( isset( $el[ 'css' ] ) )
{
$css = $el[ 'css' ];
}
$parse = true;
if( isset( $el[ 'parse' ] ) )
{
$parse = $el[ 'parse' ] ? true : false;
}
$id = null;
if( isset( $el[ 'id' ] ) )
{
$id = $el[ 'id' ];
}
$this->form->addMessage( $lang, $css, $parse, $id );
break;
case 'helper':
if( !isset( $el[ 'customClass' ] ) )
{
if( isset( $el[ 'class' ] ) )
{
$class = $el[ 'class' ];
if( isset( $this->classMap[ $class ] ) )
{
$class = $this->classMap[ $class ];
}
$class = '\\IPS\\Helpers\\Form\\' . $class;
}
else
{
$class = '\\IPS\\Helpers\\Form\\Text';
}
}
else
{
$class = $el[ 'customClass' ];
}
if( !class_exists( $class, true ) )
{
throw new \InvalidArgumentException( json_encode( $el ) );
}
$default = null;
if( is_object( $this->obj ) )
{
$obj = $this->obj;
$prop = $el[ 'name' ];
if( $obj->{$prop} )
{
$default = $obj->{$prop};
}
else
{
$prop = $langPrefix . $prop;
if( $obj->{$prop} )
{
$default = $obj->{$prop};
}
}
if( $default == null )
{
if( isset( $el[ 'default' ] ) or isset( $el[ 'def' ] ) )
{
$default = isset( $el[ 'default' ] ) ? $el[ 'default' ] : $el[ 'def' ];
}
}
}
else
{
if( isset( $el[ 'default' ] ) or isset( $el[ 'def' ] ) )
{
$default = isset( $el[ 'default' ] ) ? $el[ 'default' ] : $el[ 'def' ];
}
}
$required = false;
if( isset( $el[ 'required' ] ) )
{
$required = $el[ 'required' ];
}
$options = [];
if( isset( $el[ 'options' ] ) )
{
$options = $el[ 'options' ];
}
else if( isset( $el[ 'ops' ] ) )
{
$options = $el[ 'ops' ];
}
if( \is_array( $options ) and \count( $options ) )
{
if( isset( $options[ 'toggles' ] ) )
{
foreach( $options[ 'toggles' ] as $key => $val )
{
foreach( $val as $k => $v )
{
$options[ 'toggles' ][ $key ][ $k ] = 'js_' . $langPrefix . $v;
}
}
}
if( isset( $options[ 'togglesOn' ] ) )
{
foreach( $options[ 'togglesOn' ] as $key => $val )
{
$options[ 'togglesOn' ][] = 'js_' . $langPrefix . $val;
}
}
if( isset( $options[ 'togglesOff' ] ) )
{
foreach( $options[ 'togglesOff' ] as $key => $val )
{
$options[ 'togglesOff' ][] = 'js_' . $langPrefix . $val;
}
}
}
$validation = null;
if( isset( $el[ 'validation' ] ) )
{
$validation = $el[ 'validation' ];
}
else if( isset( $el[ 'v' ] ) )
{
$validation = $el[ 'v' ];
}
$prefix = null;
if( isset( $el[ 'prefix' ] ) )
{
$prefix = $el[ 'prefix' ];
}
$suffix = null;
if( isset( $el[ 'suffix' ] ) )
{
$suffix = $el[ 'suffix' ];
}
$id = null;
if( isset( $el[ 'id' ] ) )
{
$id = $el[ 'id' ];
}
else
{
if( !isset( $el[ 'skip_id' ] ) )
{
$id = "js_" . $name;
}
}
$element = new $class( $name, $default, $required, $options, $validation, $prefix, $suffix, $id );
if( isset( $el[ 'appearRequired' ] ) or isset( $el[ 'ap' ] ) )
{
$element->appearRequired = true;
}
if( isset( $el[ 'label' ] ) )
{
$element->label = $el[ 'label' ];
}
if( isset( $el[ 'description' ] ) )
{
$desc = $el[ 'description' ];
if( \IPS\Member::loggedIn()->language()->checkKeyExists( $desc ) )
{
if( isset( $el[ 'desc_sprintf' ] ) )
{
$sprintf = $el[ 'desc_sprintf' ];
if( !is_array( $sprintf ) )
{
$sprintf = [ $sprintf ];
}
$desc = \IPS\Member::loggedIn()
->language()
->addToStack( $desc, false, [ 'sprintf' => $sprintf ] );
}
else
{
$desc = \IPS\Member::loggedIn()->language()->addToStack( $desc );
}
}
\IPS\Member::loggedIn()->language()->words[ $name . '_desc' ] = $desc;
}
$tab = null;
$after = null;
if( isset( $el[ 'tab' ] ) )
{
$tab = $langPrefix . $el[ 'tab' ] . '_tab';
}
if( isset( $el[ 'after' ] ) )
{
$after = $langPrefix . $el[ 'after' ];
}
$this->form->add( $element, $after, $tab );
break;
case 'dummy':
$default = null;
if( isset( $el[ 'default' ] ) )
{
$default = $el[ 'default' ];
}
$desc = '';
if( isset( $el[ 'desc' ] ) )
{
if( \IPS\Member::loggedIn()->language()->checkKeyExists( $el[ 'desc' ] ) )
{
$desc = \IPS\Member::loggedIn()->language()->addToStack( $el[ 'desc' ] );
}
else
{
$desc = $el[ 'desc' ];
}
}
$warning = '';
if( isset( $el[ 'warning' ] ) )
{
if( \IPS\Member::loggedIn()->language()->checkKeyExists( $el[ 'warning' ] ) )
{
$warning = \IPS\Member::loggedIn()->language()->addToStack( $el[ 'warning' ] );
}
else
{
$warning = $el[ 'warning' ];
}
}
if( isset( $el[ 'id' ] ) )
{
$id = $el[ 'id' ];
}
else
{
$id = $name . "_js";
}
$this->form->addDummy( $name, $default, $desc, $warning, $id );
break;
case 'html':
if( !isset( $el[ 'html' ] ) )
{
throw new \InvalidArgumentException;
}
$this->form->addHtml( $el[ 'html' ] );
break;
case 'matrix':
if( isset( $el[ 'matrix' ] ) )
{
if( !( $el[ 'matrix' ] instanceof \IPS\Helpers\Form\Matrix ) )
{
throw new \InvalidArgumentException;
}
}
$this->form->addMatrix( $name, $el[ 'matrix' ] );
break;
case 'hidden':
$this->form->hiddenValues[ $name ] = $el[ 'default' ];
break;
}
}
return $this->form;
}
/**
* adds a header/tab/sidebar to an element
* @param $el
*/
final protected function setExtra( $el )
{
if( isset( $el[ 'header' ] ) )
{
$this->form->addHeader( $this->langPrefix . $el[ 'header' ] . '_header' );
}
if( isset( $el[ 'tab' ] ) )
{
$this->form->addTab( $this->langPrefix . $el[ 'tab' ] . '_tab' );
}
if( isset( $el[ 'sidebar' ] ) )
{
$sideBar = $this->langPrefix . $el[ 'sidebar' ] . '_sidebar';
if( \IPS\Member::loggedIn()->language()->checkKeyExists( $sideBar ) )
{
$sideBar = \IPS\Member::loggedIn()->language()->addToStack( $sideBar );
}
$this->form->addSidebar( $sideBar );
}
}
}<file_sep>/setup/upg_30007/upgrade.php
<?php
/**
* @brief Upgrade Class
* @author -storm_author-
* @copyright -storm_copyright-
* @package IPS Social Suite
* @subpackage Storm
* @since 3.0.7
* @version -storm_version-
*/
namespace IPS\storm\setup\upg_30007;
/* To prevent PHP errors (extending class does not exist) revealing path */
if ( !defined( '\IPS\SUITE_UNIQUE_KEY' ) )
{
header( ( isset( $_SERVER['SERVER_PROTOCOL'] ) ? $_SERVER['SERVER_PROTOCOL'] : 'HTTP/1.0' ) . ' 403 Forbidden' );
exit;
}
/**
* 3.0.7 Upgrade Code
*/
class _Upgrade
{
/**
* Add the Sync menu entry to the Storm dev menu
*
* @return array|bool If TRUE is returned, the upgrader will proceed to the next step. If it returns any other value, it will set this as the value of the 'extra' GET parameter and rerun this step (useful for loops)
*/
public function step1()
{
$menu = [
'parent' => 'Storm',
'name' => 'Sync',
'type' => 'int',
'url' => 'app=storm&module=configuration&controller=sync',
'original' => 'sync'
];
\IPS\storm\Menu::addMenu($menu);
\IPS\storm\Menu::kerching();
return TRUE;
}
// You can create as many additional methods (step2, step3, etc.) as is necessary.
// Each step will be executed in a new HTTP request
}<file_sep>/extensions/storm/Headerdoc/storm.php
<?php
/**
* @brief Storm Headerdoc extension: Storm
* @author -storm_author-
* @copyright -storm_copyright-
* @package IPS Social Suite
* @subpackage Storm
* @since 1.0.4
* @version -storm_version-
*/
namespace IPS\storm\extensions\storm\Headerdoc;
/* To prevent PHP errors (extending class does not exist) revealing path */
if( !defined( '\IPS\SUITE_UNIQUE_KEY' ) )
{
header( ( isset( $_SERVER[ 'SERVER_PROTOCOL' ] ) ? $_SERVER[ 'SERVER_PROTOCOL' ] : 'HTTP/1.0' ) . ' 403 Forbidden' );
exit;
}
/**
* storm
*/
class _storm extends \IPS\storm\Headerdoc\HeaderdocAbstract
{
/**
* enable headerdoc
**/
public function headerDocEnabled()
{
return true;
}
/**
* enable add index.html
**/
public function indexEnabled()
{
return true;
}
/**
* files to skip during building of the tar
**/
public function headerDocFilesSkip()
{
return [];
}
/**
* directories to skip during building of the tar
**/
public function headerDocDirSkip()
{
return [];
}
/**
* an array of files/folders to exclude in the headerdoc
**/
public function headerDocExclude()
{
return [];
}
}<file_sep>/hooks/Application.php
//<?php
/* To prevent PHP errors (extending class does not exist) revealing path */
if ( !defined( '\IPS\SUITE_UNIQUE_KEY' ) ) {
exit;
}
class storm_hook_Application extends _HOOK_CLASS_
{
public function assignNewVersion( $long, $human )
{
parent::assignNewVersion( $long, $human );
$this->version = $human;
\IPS\storm\Headerdoc::i()->process( $this );
}
public function build()
{
\IPS\storm\Headerdoc::i()->addIndexHtml( $this );
parent::build();
}
public function installJavascript( $offset=null, $limit=null ){
parent::installJavascript($offset, $limit);
\IPS\storm\Proxyclass::i()->generateSettings();
}
public function installOther()
{
if ( \IPS\IN_DEV and defined( 'CJ_STORM_BUILD_DEV' ) and CJ_STORM_BUILD_DEV ) {
$dir = \IPS\ROOT_PATH . "/applications/" . $this->directory . "/dev/";
if ( !file_exists( $dir ) and $this->directory !== "storm" ) {
$app = new \IPS\storm\Apps( $this );
$app->addToStack = true;
$app->email();
$app->javascript();
$app->language();
$app->templates();
}
}
parent::installOther();
}
}
<file_sep>/hooks/applicationBuilderIterator.php
//<?php
/* To prevent PHP errors (extending class does not exist) revealing path */
if ( !defined( '\IPS\SUITE_UNIQUE_KEY' ) ) {
exit;
}
class storm_hook_applicationBuilderIterator extends _HOOK_CLASS_
{
/**
* Current value
*
* @return void
*/
public function current()
{
$file = (string) parent::current();
if ( mb_substr( str_replace( '\\', '/', $file ),
mb_strlen( \IPS\ROOT_PATH . "/applications/" . $this->application->directory ) + 1, 6 ) === 'hooks/'
) {
$temporary = tempnam( \IPS\TEMP_DIRECTORY, 'IPS' );
\file_put_contents( $temporary, \IPS\Plugin::addExceptionHandlingToHookFile( $file ) );
register_shutdown_function( function ( $temporary ) {
unlink( $temporary );
}, $temporary );
return $temporary;
}
else {
if ( is_file( $file ) and mb_strpos( $file, '3rdparty' ) === false and mb_strpos( $file, '3rd_party' ) === false and mb_strpos( $file, 'vendor' ) === false ) {
if ( !\IPS\storm\Headerdoc::i()->can( $this->application ) ) {
return $file;
}
$path = new \SplFileInfo( $file );
if ( $path->getExtension() == "js" and $path->getFilename() === "babble.js" ) {
$temp = tempnam( \IPS\TEMP_DIRECTORY, 'IPS' );
$content = \file_get_contents( $file );
$replace = \preg_replace( "#var privateKey = '(.*?)';#", "var privateKey = '';", $content );
\file_put_contents( $temp, $replace );
return $temp;
}
if ( $path->getExtension() == "php" ) {
$temporary = tempnam( \IPS\TEMP_DIRECTORY, 'IPS' );
$contents = \file_get_contents( $file );
foreach ( $this->application->extensions( 'storm', 'Headerdoc' ) as $class ) {
if ( method_exists( $class, 'headerDocFinalize' ) ) {
$contents = $class->headerDocFinalize( $contents, $this->application );
}
}
\file_put_contents( $temporary, $contents );
register_shutdown_function( function ( $temporary ) {
unlink( $temporary );
}, $temporary );
return $temporary;
}
}
return $file;
}
}
}
<file_sep>/dev/js/admin/controllers/query/ips.query.query.js
;( function($, _, undefined){
"use strict";
ips.controller.register('storm.admin.query.query', {
initialize: function () {
this.on('change', '[id="elSelect_js_storm_query_table"]', this._getFields);
this.on('change', '[id="elSelect_js_storm_query_columns"]', this._getFields);
},
_getFields: function(e){
// console.log('yes');
var url = ips.getSetting('storm_table_url');
var ajax = ips.getAjax();
ajax( {
url: url+"&do=getFields&table="+$(e.target).val(),
type: "GET",
success:function(data){
console.log( data );
if( data.error == 0 ) {
$('#elSelect_js_storm_query_columns').replaceWith(data.html);
}
}
} );
}
});
}(jQuery, _));<file_sep>/sources/Forms/Select.php
<?php
/**
* @brief Select Class
* @author <a href='http://codingjungle.com'><NAME></a>
* @copyright (c) 2017 <NAME>
* @package IPS Social Suite
* @subpackage Storm
* @since -storm_since_version-
* @version 3.0.4
*/
namespace IPS\storm\Forms;
if( !defined( '\IPS\SUITE_UNIQUE_KEY' ) )
{
header( ( isset( $_SERVER[ 'SERVER_PROTOCOL' ] ) ? $_SERVER[ 'SERVER_PROTOCOL' ] :
'HTTP/1.0' ) . ' 403 Forbidden' );
exit;
}
class _Select extends \IPS\Helpers\Form\Select
{
/**
* Validate
*
* @throws \OutOfRangeException
* @return TRUE
*/
public function validate()
{
return true;
}
}<file_sep>/sources/Profiler/Template.php
<?php
/**
* @brief Template Class
* @author -storm_author-
* @copyright -storm_copyright-
* @package IPS Social Suite
* @subpackage Storm
* @since 2.0.0
* @version -storm_version-
*/
namespace IPS\storm\Profiler;
class _Template extends \IPS\Patterns\Singleton
{
protected static $instance = null;
protected $storm = null;
protected $langs = null;
public function __construct()
{
$this->db_execution_time = "Execution Time: %s";
$this->memory_name = "Name: %s";
$this->filename = "Filename: %s";
$this->path = "Path: %s";
$this->size = "Size: %s";
$this->storm = \IPS\storm\Profiler::i();
}
public function tabs()
{
$storm = $this->storm;
$this->langs = [
'console' => sprintf( 'Console (%s)', $storm->consoleTab ),
'dbTab' => sprintf( 'DB Queries (%s)', $storm->dbQueriesTab ),
'memoryTab' => sprintf( 'Memory (%s)', $storm->memoryTab ),
'fileTab' => sprintf( 'Files (%s)', $storm->fileTab ),
'timeTab' => sprintf( 'Execution Times (%s)', $storm->speedTab ),
'cacheTab' => sprintf( 'Cache (%s)', $storm->cacheTab ),
'logsTab' => sprintf( 'Logs (<span id="profilerLogTabCount">%s</span>)', $storm->logsTab ),
'none' => "<div class='ipsPad'>This is not the tab you are looking for.</div>",
'memTotal' => sprintf( "%s<br>Memory Used", $this->storm->memoryTotal ),
'dbTotal' => sprintf( "%s<br>DB Queries", $this->storm->dbQueriesTab ),
'fileTotal' => sprintf( "%s<br>Included Files", $this->storm->fileTab ),
'timeTotal' => sprintf( "%s<br>Execution Time", $this->storm->totalTime ),
'cacheTotal' => sprintf( "%s<br> Caches", $this->storm->cacheTab ),
'logsTotal' => sprintf( "%s logs", $this->storm->logsTab ),
];
$fixed = '';
$button = '';
if( !\IPS\Settings::i()->storm_profiler_is_fixed )
{
$fixed = "stormProfilerFixed";
$button = <<<EOF
<div class="stormProfilerButtonContainer">
<button id="eLstormButton" type="button" class="ipsButton stormProfileButton" data-ipsstormprofile>Profiler</button>
</div>
EOF;
}
$langs = $this->langs;
$dbTab = '';
if( $this->storm->dbEnabled )
{
$dbTab = <<<EOF
<li role='presentation'>
<a href='#' id='stormProfilerDbQueries' role='tab' class='ipsTabs_item'>
{$langs['dbTab']}
</a>
</li>
EOF;
}
$memTab = '';
if( $this->storm->memEnabled )
{
$memTab = <<<EOF
<li role='presentation'>
<a href='#' id='stormProfilerMemory' role='tab' class='ipsTabs_item'>
{$langs['memoryTab']}
</a>
</li>
EOF;
}
$executionTab = '';
if( $this->storm->timeEnabled )
{
$executionTab = <<<EOF
<li role='presentation'>
<a href='#' id='stormProfilerTime' role='tab' class='ipsTabs_item'>
{$langs['timeTab']}
</a>
</li>
EOF;
}
$cacheTab = '';
if( $this->storm->cacheEnabled )
{
$cacheTab = <<<EOF
<li role='presentation'>
<a href='#' id='stormProfilerCache' role='tab' class='ipsTabs_item'>
{$langs['cacheTab']}
</a>
</li>
EOF;
}
$logTab = '';
if( $this->storm->logsEnabled )
{
$logTab = <<<EOF
<li role='presentation'>
<a href='#' id='stormProfilerLog' role='tab' class='ipsTabs_item'>
{$langs['logsTab']}
</a>
</li>
EOF;
}
$html = <<<EOF
<div class="stormProfile {$fixed}" >
{$button}
<div id="eLstormTabs" class=" stormProfileTabs">
<div class='ipsTabs ipsClearfix' id='elStormProfilerTabs' data-ipsTabBar data-ipsTabBar-contentArea='#elStormProfilerTabsContent' data-ipsTabBar-activeClass="stormActiveTab">
<a href='#elStormProfilerTabs' data-action='expandTabs'><i class='icon-caret-down'></i></a>
<ul role='tablist'>
<li role='presentation'>
<a href='#' id='elStormTabConsole' class='ipsTabs_item'>
{$langs['console']}
</a>
</li>
{$dbTab}
{$memTab}
{$executionTab}
{$cacheTab}
{$logTab}
</ul>
</div>
<section id='elStormProfilerTabsContent' class='ipsTabs_panels'>
{$this->consoleTab()}
{$this->memoryTab()}
{$this->dbQueryTab()}
{$this->cacheTab()}
{$this->speedTab()}
{$this->logTab()}
</section>
</div>
</div>
EOF;
return $html;
}
public function consoleTab()
{
$console = $this->storm->processedLogs;
$dbTab = '';
if( $this->storm->dbEnabled )
{
$dbTab = <<<EOF
<div class='ipsGrid_span6 stormProfilerConsoleSide stormProfilerDbQueriesConsole'>
<span>
{$this->langs['dbTotal']}
</span>
</div>
EOF;
}
$memTab = '';
if( $this->storm->memEnabled )
{
$memTab = <<<EOF
<div class='ipsGrid_span6 stormProfilerConsoleSide stormProfilerMemoryConsole'>
<span>
{$this->langs['memTotal']}
</span>
</div>
EOF;
}
$filesTab = '';
if( $this->storm->filesEnabled )
{
$filesTab = <<<EOF
<div class='ipsGrid_span6 stormProfilerConsoleSide stormProfilerFilesConsole'>
<span>
{$this->langs['fileTotal']}
</span>
</div>
EOF;
}
$executionTab = '';
if( $this->storm->timeEnabled )
{
$executionTab = <<<EOF
<div class='ipsGrid_span6 stormProfilerConsoleSide stormProfileTimeConsole'>
<span>
{$this->langs['timeTotal']}
</span>
</div>
EOF;
}
$cacheTab = '';
if( $this->storm->cacheEnabled )
{
$cacheTab = <<<EOF
<div class='ipsGrid_span6 stormProfilerConsoleSide stormProfilerCacheConsole'>
<span>
{$this->langs['cacheTotal']}
</span>
</div>
EOF;
}
$logTab = '';
if( $this->storm->logsEnabled )
{
$logTab = <<<EOF
<div class='ipsGrid_span6 stormProfilerConsoleSide stormProfilerLogConsole'>
<span>
{$this->langs['logsTotal']}
</span>
</div>
EOF;
}
$html = <<<EOF
<div id="ipsTabs_elStormProfilerTabs_elStormTabConsole_panel" class="ipsTabs_panel ipsPad stormProfilerPanels">
<div class="stormProfileConsoleBrief">
<div class='ipsColumns'>
<div class="ipsColumn ipsColumn_veryWide">
<div class='ipsGrid'>
{$dbTab}
{$memTab}
{$filesTab}
{$executionTab}
{$cacheTab}
{$logTab}
</div>
</div>
<div class="ipsColumn ipsColumn_fluid">
<div class="stormProfilerBaseContainer stormProfilerLogs ">
{$console}
</div>
</div>
</div>
</div>
</div>
EOF;
return $html;
}
public function memoryTab()
{
if( $this->storm->memEnabled )
{
if( $this->storm->memoryList )
{
$memory = $this->storm->memoryList;
}
else
{
$memory = $this->langs[ 'none' ];
}
$html = <<<EOF
<div id="ipsTabs_elStormProfilerTabs_stormProfilerMemory_panel" class="ipsTabs_panel ipsPad stormProfilerPanels">
<div class="stormProfileConsoleBrief">
<div class='ipsColumns'>
<div class="ipsColumn ipsColumn_veryWide">
<div class='ipsGrid'>
<div class='ipsGrid_span12 stormProfilerConsoleSide stormProfilerMemoryConsole'>
<span>
{$this->langs['memTotal']}
</span>
</div>
</div>
</div>
<div class="ipsColumn ipsColumn_fluid">
<div class="stormProfilerBaseContainer stormProfilerLogs">
{$memory}
</div>
</div>
</div>
</div>
</div>
EOF;
return $html;
}
}
public function dbQueryTab()
{
if( $this->storm->dbEnabled )
{
if( !$this->storm->dbQueriesList )
{
$db = $this->langs[ 'none' ];
}
else
{
$db = $this->storm->dbQueriesList;
}
$html = <<<EOF
<div id="ipsTabs_elStormProfilerTabs_stormProfilerDbQueries_panel" class="ipsTabs_panel ipsPad stormProfilerPanels">
<div class="stormProfileConsoleBrief">
<div class='ipsColumns'>
<div class="ipsColumn ipsColumn_veryWide">
<div class='ipsGrid'>
<div class='ipsGrid_span12 stormProfilerConsoleSide stormProfilerDbQueriesConsole'>
<span>
{$this->langs['dbTotal']}
</span>
</div>
</div>
</div>
<div class="ipsColumn ipsColumn_fluid">
<div class="stormProfilerBaseContainer stormProfilerLogs">
{$db}
</div>
</div>
</div>
</div>
</div>
EOF;
return $html;
}
}
public function cacheTab()
{
if( $this->storm->cacheEnabled )
{
if( !$this->storm->cacheList )
{
$totals = $this->langs[ 'none' ];
}
else
{
$totals = $this->storm->cacheList;
}
$html = <<<EOF
<div id="ipsTabs_elStormProfilerTabs_stormProfilerCache_panel" class="ipsTabs_panel ipsPad stormProfilerPanels">
<div class="stormProfileConsoleBrief">
<div class='ipsColumns'>
<div class="ipsColumn ipsColumn_veryWide">
<div class='ipsGrid'>
<div class='ipsGrid_span12 stormProfilerConsoleSide stormProfilerCacheConsole'>
<span>
{$this->langs['cacheTotal']}
</span>
</div>
</div>
</div>
<div class="ipsColumn ipsColumn_fluid">
<div class="stormProfilerBaseContainer stormProfilerLogs">
{$totals}
</div>
</div>
</div>
</div>
</div>
EOF;
return $html;
}
}
public function speedTab()
{
if( $this->storm->timeEnabled )
{
if( !$this->storm->speedList )
{
$totals = $this->langs[ 'none' ];
}
else
{
$totals = $this->storm->speedList;
}
$html = <<<EOF
<div id="ipsTabs_elStormProfilerTabs_stormProfilerTime_panel" class="ipsTabs_panel ipsPad stormProfilerPanels">
<div class="stormProfileConsoleBrief">
<div class='ipsColumns'>
<div class="ipsColumn ipsColumn_veryWide">
<div class='ipsGrid'>
<div class='ipsGrid_span12 stormProfilerConsoleSide stormProfileTimeConsole'>
<span>
{$this->langs['timeTotal']}
</span>
</div>
</div>
</div>
<div class="ipsColumn ipsColumn_fluid">
<div class="stormProfilerBaseContainer stormProfilerLogs">
{$totals}
</div>
</div>
</div>
</div>
</div>
EOF;
return $html;
}
}
public function logTab()
{
if( $this->storm->logsEnabled )
{
$url = \IPS\Settings::i()->base_url . 'applications/storm/interface/logs/logs.php';
$time = time();
if( !$this->storm->ipsLogsList )
{
$totals = $this->langs[ 'none' ];
}
else
{
$totals = $this->storm->ipsLogsList;
}
$html = <<<EOF
<div id="ipsTabs_elStormProfilerTabs_stormProfilerLog_panel" class="ipsTabs_panel ipsPad stormProfilerPanels">
<div class="stormProfileConsoleBrief">
<div class='ipsColumns'>
<div class="ipsColumn ipsColumn_veryWide">
<div class='ipsGrid'>
<div class='ipsGrid_span12 stormProfilerConsoleSide stormProfilerLogConsole'>
<span>
{$this->langs['logsTotal']}
</span>
</div>
</div>
</div>
<div class="ipsColumn ipsColumn_fluid">
<div id="stormProfilerLogs" class="stormProfilerBaseContainer stormProfilerLogs" data-stormtime="{$time}" data-ipsstormdebug data-ipsstormdebug-url="{$url}">
{$totals}
</div>
</div>
</div>
</div>
</div>
EOF;
return $html;
}
}
public function consoleContainer( $type, $body, $lang, $class = '' )
{
return <<<EOF
<div class="ipsColumns stormProfilerSpacer stormProfiler{$type}{$class}">
<div class="ipsColumn ipsColumn_narrow ipsPad">{$lang}</div>
<div class="ipsColumn ipsColumn_fluid ipsPad">{$body}</div>
</div>
EOF;
}
public function memory( array $mem )
{
if( $this->storm->memEnabled )
{
$lang = sprintf( $this->memory_name, $mem[ 'name' ] );
return <<<EOF
<div class="stormProfilerBase">
<div>{$lang}</div>
<div>{$mem['memory']}<br>{$mem['msg']}</div>
</div>
EOF;
}
}
public function db( $query )
{
if( $this->storm->dbEnabled )
{
$u = \IPS\Http\Url::internal( 'app=storm&module=general&controller=general&do=backtrace&id=' . $query[ 'backtrace' ],
'front' );
$lang = sprintf( $this->db_execution_time, $query[ 'time' ] );
$html = <<<EOF
<div class="stormProfilerBase" data-ipsDialog data-ipsDialog-url="{$u}">
<div>{$lang}</div>
<div>
<code class="prettyprint lang-sql stormProfilerBasePointer">{$query['query']}</code>
</div>
</div>
EOF;
return $html;
}
}
public function cache( $type, $key, $num )
{
if( $this->storm->cacheEnabled )
{
$u = \IPS\Http\Url::internal( 'app=storm&module=general&controller=general&do=cache&id=' . $num, 'front' );
$type = sprintf( 'Type: %s', $type );
$key = sprintf( 'Key: %s', $key );
$html = <<<EOF
<div class="stormProfilerBase stormProfilerCacheLog" data-ipsDialog data-ipsDialog-url="{$u}">
<div>{$type}</div>
<div>{$key}</div>
</div>
EOF;
return $html;
}
}
public function speed( $for, $time, $percent )
{
if( $this->storm->timeEnabled )
{
$for = sprintf( 'For: %s', $for );
$time = sprintf( 'Execution Time: %s', $time );
$percent = sprintf( 'Percentage of Total Time: %s', $percent );
$html = <<<EOF
<div class="stormProfilerBase">
<div>{$for}</div>
<div>{$time}</div>
<div>{$percent}</div>
</div>
EOF;
return $html;
}
}
public function log( $data )
{
if( $this->storm->logsEnabled )
{
$u = uniqid();
$exception_class = '';
$exception_code = '';
$category = '';
$msg = nl2br( $data[ 'message' ] . "<br>" . $data[ 'backtrace' ] );
$trunc = \htmlspecialchars( \mb_substr( \html_entity_decode( $data[ 'message' ] ), 0, 20 ), ENT_NOQUOTES,
'UTF-8', false );
if( $data[ 'exception_class' ] )
{
$lang = sprintf( "Exception's Class: %s", $data[ 'exception_class' ] );
$exception_class = "<div>{$lang}</div>";
}
if( $data[ 'exception_code' ] )
{
$lang = sprintf( "Exception's Code: %s", $data[ 'exception_code' ] );
$exception_code = "<div>{$lang}</div>";
}
if( $data[ 'category' ] )
{
$lang = sprintf( "Category: %s", $data[ 'category' ] );
$category = "<div>{$lang}</div>";
}
$html = <<<EOF
<div class="stormProfilerBase stormProfilerBasePointer" data-ipsDialog data-ipsDialog-content="#{$u}">
{$exception_class}
{$exception_code}
{$category}
<div>{$trunc}</div>
<div id="{$u}" class="ipsHide ipsPad">
{$msg}
</div>
</div>
EOF;
return $html;
}
}
public function logObj( $data )
{
if( $this->storm->logsEnabled )
{
$u = uniqid();
$exception_class = '';
$exception_code = '';
$category = '';
$msg = nl2br( $data->message . "<br>" . $data->backtrace );
$trunc = \htmlspecialchars( \mb_substr( \html_entity_decode( $data->message ), 0, 20 ), ENT_NOQUOTES,
'UTF-8', false );
if( $data->exception_class )
{
$lang = sprintf( "Exception's Class: %s", $data->exception_class );
$exception_class = "<div>{$lang}</div>";
}
if( $data->exception_code )
{
$lang = sprintf( "Exception's Code: %s", $data->exception_code );
$exception_code = "<div>{$lang}</div>";
}
if( $data->category )
{
$lang = sprintf( "Category: %s", $data->category );
$category = "<div>{$lang}</div>";
}
$html = <<<EOF
<div class="stormProfilerBase stormProfilerBasePointer" data-ipsDialog data-ipsDialog-content="#{$u}">
{$exception_class}
{$exception_code}
{$category}
<div>{$trunc}</div>
<div id="{$u}" class="ipsHide ipsPad">
{$msg}
</div>
</div>
EOF;
return $html;
}
}
}
<file_sep>/hooks/Lang.php
//<?php
/* To prevent PHP errors (extending class does not exist) revealing path */
if ( !defined( '\IPS\SUITE_UNIQUE_KEY' ) ) {
exit;
}
class storm_hook_Lang extends _HOOK_CLASS_
{
public function parseOutputForDisplay( &$output )
{
\IPS\storm\Profiler::i()->timeStart();
parent::parseOutputForDisplay( $output );
\IPS\storm\Profiler::i()->timeEnd( 'parseOutputForDisplay' );
}
}
<file_sep>/sources/Profiler/Profiler.php
<?php
/**
* @brief Profiler Active Record
* @author <a href='http://codingjungle.com'><NAME></a>
* @copyright (c) 2017 <NAME>
* @package IPS Social Suite
* @subpackage Storm
* @since -storm_since_version-
* @version 3.0.4
*/
namespace IPS\storm;
if( !defined( '\IPS\SUITE_UNIQUE_KEY' ) )
{
header( ( isset( $_SERVER[ 'SERVER_PROTOCOL' ] ) ? $_SERVER[ 'SERVER_PROTOCOL' ] :
'HTTP/1.0' ) . ' 403 Forbidden' );
exit;
}
class _Profiler
{
protected static $instance = null;
protected $countTab = 0;
protected $consoleTab = 0;
protected $dbQueriesList = '';
protected $dbQueriesTab = 0;
protected $memoryList = '';
protected $memoryTab = 0;
protected $memoryTotal = 0;
protected $fileTab = 0;
protected $totalTime = null;
protected $logMessage = '';
protected $logsTab = 0;
protected $logList = '';
protected $processedLogs = '';
protected $timeTab = 0;
protected $cacheList = '';
protected $cacheTab = 0;
protected $mstart = null;
protected $ttime = null;
protected $cacheLogs = [];
protected $dbLogs = [];
protected $speed = [];
protected $speedTab = 0;
protected $speedList = '';
protected $ipsLogsList = '';
protected $ipsLogsTab = 0;
protected $filesEnabled = true;
protected $logsEnabled = true;
protected $dbEnabled = true;
protected $dbEnabledSpeed = true;
protected $memEnabled = true;
protected $cacheEnabled = true;
protected $timeEnabled = true;
protected $type = 0;
final protected function __construct( $type )
{
if( defined( 'CJ_STORM_PROFILER_DISABLE_LOGS' ) and CJ_STORM_PROFILER_DISABLE_LOGS )
{
$this->logsEnabled = false;
}
if( defined( 'CJ_STORM_PROFILER_DISABLE_DB' ) and CJ_STORM_PROFILER_DISABLE_DB )
{
$this->dbEnabled = false;
}
if( defined( 'CJ_STORM_PROFILER_DISABLE_DB_SPEED' ) and CJ_STORM_PROFILER_DISABLE_DB_SPEED )
{
$this->dbEnabledSpeed = false;
}
if( defined( 'CJ_STORM_PROFILER_DISABLE_MEM' ) and CJ_STORM_PROFILER_DISABLE_MEM )
{
$this->memEnabled = false;
}
if( defined( 'CJ_STORM_PROFILER_DISABLE_CACHE' ) and CJ_STORM_PROFILER_DISABLE_CACHE )
{
$this->cacheEnabled = false;
}
if( defined( 'CJ_STORM_PROFILER_DISABLE_TIME' ) and CJ_STORM_PROFILER_DISABLE_TIME )
{
$this->timeEnabled = false;
}
if( defined( 'CJ_STORM_PROFILER_DISABLE_FILE' ) and CJ_STORM_PROFILER_DISABLE_FILE )
{
$this->filesEnabled = false;
}
}
public static function i( $type = 0 )
{
if( static::$instance === null )
{
static::$instance = new static( $type );
}
static::$instance->type = $type;
return static::$instance;
}
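/*
 * Usage sketch (illustrative, not part of the original source): the profiler is driven through
 * its singleton accessor; the labels used here are hypothetical.
 *
 * $profiler = \IPS\storm\Profiler::i();
 * $profiler->timeStart();
 * $profiler->memoryStart();
 * // ... code being profiled ...
 * $profiler->memoryEnd( 'example block' );
 * $profiler->timeEnd( 'example block' );
 * $profiler->log( 'something worth noting', 'example category' );
 */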
public function run()
{
if( ( defined( 'CJ_STORM_PROFILER' ) and CJ_STORM_PROFILER ) or ( defined( 'CJ_STORM_PROFILER_SAFE_MODE' ) and CJ_STORM_PROFILER_SAFE_MODE and \IPS\storm\Profiler::profilePassCheck() ) )
{
$this->fileTab();
$this->memoryTotal();
$this->totalTime();
if( $this->type == 1 )
{
if( isset( \IPS\Data\Store::i()->storm_bt ) )
{
unset( \IPS\Data\Store::i()->storm_bt );
}
if( isset( \IPS\Data\Store::i()->storm_cache ) )
{
unset( \IPS\Data\Store::i()->storm_cache );
}
}
\IPS\Data\Store::i()->storm_bt = $this->dbLogs;
\IPS\Data\Store::i()->storm_cache = $this->cacheLogs;
return \IPS\storm\Profiler\Template::i()->tabs();
}
}
public static function profilePassCheck()
{
if( defined( 'CJ_STORM_PROFILER_SAFE_MODE' ) and CJ_STORM_PROFILER_SAFE_MODE )
{
$password = defined( 'CJ_STORM_PROFILER_PASS' ) ? CJ_STORM_PROFILER_PASS : md5( rand( 1, 100000 ) );
$pass = \IPS\Request::i()->profilerPass;
if( $password == $pass or ( isset( $_SESSION[ 'storm_profile' ] ) and $_SESSION[ 'storm_profile' ] ) )
{
if( !isset( $_SESSION[ 'storm_profile' ] ) )
{
$_SESSION[ 'storm_profile' ] = true;
}
return true;
}
}
}
protected function fileTab()
{
if( $this->filesEnabled )
{
$files = get_included_files();
$count = count( $files );
$this->fileTab = $count;
}
}
protected function memoryTotal( $object = null, $name = 'total' )
{
if( $this->memEnabled )
{
if( ( defined( 'CJ_STORM_PROFILER' ) and CJ_STORM_PROFILER ) or ( defined( 'CJ_STORM_PROFILER_SAFE_MODE' ) and CJ_STORM_PROFILER_SAFE_MODE and \IPS\storm\Profiler::profilePassCheck() ) )
{
$mem = ( \is_object( $object ) ) ? mb_strlen( \serialize( $object ) ) : \memory_get_usage();
$mem = $this->formatBytes( $mem );
if( $name === 'total' )
{
$this->memoryTotal = $this->formatBytes( \memory_get_usage() );
}
$this->consoleTab++;
$this->memoryTab++;
$msg = \IPS\storm\Profiler\Template::i()->memory( [ 'name' => $name, 'msg' => '', 'memory' => $mem ] );
$msg = \IPS\storm\Profiler\Template::i()
->consoleContainer( 'Memory', $msg, "Memory",
$this->oddEven( $this->memoryTab ) );
$this->memoryList = $msg . "\n" . $this->memoryList;
$this->processedLogs = $msg . "\n" . $this->processedLogs;
}
}
}
protected function formatBytes( $size, $precision = 2 )
{
$base = \log( $size, 1024 );
$suffixes = [ 'B', 'KB', 'MB', 'GB', 'TB' ];
return \round( \pow( 1024, $base - \floor( $base ) ), $precision ) . ' ' . $suffixes[ \floor( $base ) ];
}
public function oddEven( $num )
{
if( $num % 2 == 0 )
{
return "Even";
}
return "Odd";
}
protected function totalTime()
{
if( ( defined( 'CJ_STORM_PROFILER' ) and CJ_STORM_PROFILER ) or ( defined( 'CJ_STORM_PROFILER_SAFE_MODE' ) and CJ_STORM_PROFILER_SAFE_MODE and \IPS\storm\Profiler::profilePassCheck() ) )
{
$this->totalTime = round( microtime( true ) - $_SERVER[ "REQUEST_TIME_FLOAT" ], 4 );
if( is_array( $this->speed ) and count( $this->speed ) )
{
foreach( $this->speed as $num => $speed )
{
$percent = number_format( ( $speed / $this->totalTime ) * 100, 2 );
$this->processedLogs = str_replace( "##speed{$num}##", $percent, $this->processedLogs );
$this->speedList = str_replace( "##speed{$num}##", $percent, $this->speedList );
}
}
}
}
public function __get( $key )
{
if( property_exists( $this, $key ) )
{
return $this->{$key};
}
}
public function log( $message, $category = null )
{
if( $this->logsEnabled )
{
if( ( defined( 'CJ_STORM_PROFILER' ) and CJ_STORM_PROFILER ) or ( defined( 'CJ_STORM_PROFILER_SAFE_MODE' ) and CJ_STORM_PROFILER_SAFE_MODE and \IPS\storm\Profiler::profilePassCheck() ) )
{
$exception_class = '';
$exception_code = '';
if( $message instanceof \Exception )
{
$exception_class = get_class( $message );
$exception_code = $message->getCode();
if( method_exists( $message, 'extraLogData' ) )
{
$msg = $message->extraLogData() . "\n" . $message->getMessage();
}
else
{
$msg = $message->getMessage();
}
$backtrace = $message->getTraceAsString();
}
else
{
if( is_array( $message ) )
{
$message = var_export( $message, true );
}
$msg = $message;
$backtrace = ( new \Exception )->getTraceAsString();
}
$final = [
'message' => $msg,
'backtrace' => $backtrace,
'exception_class' => $exception_class,
'exception_code' => $exception_code,
'category' => $category
];
$this->consoleTab++;
$this->ipsLogsTab++;
$msg = \IPS\storm\Profiler\Template::i()->log( $final );
$msg = \IPS\storm\Profiler\Template::i()
->consoleContainer( 'Log', $msg, "IPS Log",
$this->oddEven( $this->ipsLogsTab ) );
$this->ipsLogsList = $msg . "\n" . $this->ipsLogsList;
$this->processedLogs = $msg . "\n" . $this->processedLogs;
}
}
}
public function dbQuery( $query, $time )
{
if( $this->dbEnabled )
{
if( ( defined( 'CJ_STORM_PROFILER' ) and CJ_STORM_PROFILER ) or ( defined( 'CJ_STORM_PROFILER_SAFE_MODE' ) and CJ_STORM_PROFILER_SAFE_MODE and \IPS\storm\Profiler::profilePassCheck() ) )
{
$this->consoleTab++;
$this->dbQueriesTab++;
$hash = sha1( trim( $query[ 'backtrace' ] ) );
$this->dbLogs[ $hash ] = $query;
$msg = \IPS\storm\Profiler\Template::i()->db( [
'query' => $query[ 'query' ],
'backtrace' => $hash,
'time' => $time
] );
$msg = \IPS\storm\Profiler\Template::i()
->consoleContainer( 'DbQueries', $msg, "DB Query",
$this->oddEven( $this->dbQueriesTab ) );
$this->dbQueriesList = $msg . "\n" . $this->dbQueriesList;
$this->processedLogs = $msg . "\n" . $this->processedLogs;
if( $this->dbEnabledSpeed )
{
$this->speed( $query[ 'query' ], $time );
}
}
}
}
public function speed( $for, $end )
{
if( $this->timeEnabled )
{
if( ( defined( 'CJ_STORM_PROFILER' ) and CJ_STORM_PROFILER ) or ( defined( 'CJ_STORM_PROFILER_SAFE_MODE' ) and CJ_STORM_PROFILER_SAFE_MODE and \IPS\storm\Profiler::profilePassCheck() ) )
{
$this->consoleTab++;
$this->speedTab++;
$percent = "##speed{$this->speedTab}##";
$this->speed[ $this->speedTab ] = $end;
$msg = \IPS\storm\Profiler\Template::i()->speed( $for, $end, $percent );
$msg = \IPS\storm\Profiler\Template::i()
->consoleContainer( 'Speed', $msg, "Execution Time",
$this->oddEven( $this->speedTab ) );
$this->speedList = $msg . "\n" . $this->speedList;
$this->processedLogs = $msg . "\n" . $this->processedLogs;
}
}
}
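    /**
     * Memory profiling helpers. Illustrative usage (the label is an example only):
     * @code
     * $profiler = \IPS\storm\Profiler::i();
     * $profiler->memoryStart();
     * // ... code being measured ...
     * $profiler->memoryEnd( 'some label' );
     * @endcode
     * memoryEnd() only logs if memoryStart() was called first.
     */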
public function memoryStart()
{
if( $this->memEnabled )
{
if( ( defined( 'CJ_STORM_PROFILER' ) and CJ_STORM_PROFILER ) or ( defined( 'CJ_STORM_PROFILER_SAFE_MODE' ) and CJ_STORM_PROFILER_SAFE_MODE and \IPS\storm\Profiler::profilePassCheck() ) )
{
$this->mstart = \memory_get_usage();
}
}
}
public function memoryEnd( $for, $msgs = '' )
{
if( $this->memEnabled )
{
if( ( defined( 'CJ_STORM_PROFILER' ) and CJ_STORM_PROFILER ) or ( defined( 'CJ_STORM_PROFILER_SAFE_MODE' ) and CJ_STORM_PROFILER_SAFE_MODE and \IPS\storm\Profiler::profilePassCheck() ) )
{
if( $this->mstart !== null )
{
$end = \memory_get_usage() - $this->mstart;
$this->consoleTab++;
$this->memoryTab++;
$msg = \IPS\storm\Profiler\Template::i()->memory( [
'name' => $for,
'msg' => $msgs,
'memory' => $this->formatBytes( $end )
] );
$msg = \IPS\storm\Profiler\Template::i()
->consoleContainer( 'Memory', $msg, 'Memory',
$this->oddEven( $this->memoryTab ) );
$this->memoryList = $msg . "\n" . $this->memoryList;
$this->processedLogs = $msg . "\n" . $this->processedLogs;
}
$this->mstart = null;
}
}
}
public function cacheLog( $cache = [] )
{
if( $this->cacheEnabled )
{
if( ( defined( 'CJ_STORM_PROFILER' ) and CJ_STORM_PROFILER ) or ( defined( 'CJ_STORM_PROFILER_SAFE_MODE' ) and CJ_STORM_PROFILER_SAFE_MODE and \IPS\storm\Profiler::profilePassCheck() ) )
{
$this->consoleTab++;
$this->cacheTab++;
$this->cacheLogs[ $this->cacheTab ] = $cache;
$msg = \IPS\storm\Profiler\Template::i()->cache( $cache[ 'type' ], $cache[ 'key' ], $this->cacheTab );
$msg = \IPS\storm\Profiler\Template::i()
->consoleContainer( 'Cache', $msg, "Cache",
$this->oddEven( $this->cacheTab ) );
$this->cacheList = $msg . "\n" . $this->cacheList;
$this->processedLogs = $msg . "\n" . $this->processedLogs;
}
}
}
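    /**
     * Execution-time helpers. Illustrative usage (the label is an example only):
     * @code
     * $profiler = \IPS\storm\Profiler::i();
     * $profiler->timeStart();
     * // ... code being measured ...
     * $profiler->timeEnd( 'some label' ); // feeds the elapsed time into speed()
     * @endcode
     */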
public function timeStart()
{
if( $this->timeEnabled )
{
if( ( defined( 'CJ_STORM_PROFILER' ) and CJ_STORM_PROFILER ) or ( defined( 'CJ_STORM_PROFILER_SAFE_MODE' ) and CJ_STORM_PROFILER_SAFE_MODE and \IPS\storm\Profiler::profilePassCheck() ) )
{
$this->ttime = microtime( true );
}
}
}
public function timeEnd( $for )
{
if( $this->timeEnabled )
{
if( ( defined( 'CJ_STORM_PROFILER' ) and CJ_STORM_PROFILER ) or ( defined( 'CJ_STORM_PROFILER_SAFE_MODE' ) and CJ_STORM_PROFILER_SAFE_MODE and \IPS\storm\Profiler::profilePassCheck() ) )
{
if( $this->ttime !== null )
{
$end = microtime( true ) - $this->ttime;
$this->speed( $for, $end );
}
$this->ttime = null;
}
}
}
}
<file_sep>/interface/sync/task.php
<?php
require_once str_replace(
    'applications/storm/interface/sync/task.php', '',
str_replace( '\\', '/', __FILE__
)
) . 'init.php';
\IPS\Session\Front::i();
\IPS\storm\Sync::send();<file_sep>/sources/Headerdoc/HeaderdocAbstract.php
<?php
/**
* @brief HeaderdocAbstract Class
* @author -storm_author-
* @copyright -storm_copyright-
* @package IPS Social Suite
* @subpackage Storm
* @since 1.0.4
* @version -storm_version-
*/
namespace IPS\storm\Headerdoc;
if( !defined( '\IPS\SUITE_UNIQUE_KEY' ) )
{
header( ( isset( $_SERVER[ 'SERVER_PROTOCOL' ] ) ? $_SERVER[ 'SERVER_PROTOCOL' ] :
'HTTP/1.0' ) . ' 403 Forbidden' );
exit;
}
class _HeaderdocAbstract
{
    /**
     * Finalize the header doc block for a file, updating the @author, @copyright and @version tags
     **/
public function headerDocFinalize( $line, $application )
{
$line = preg_replace_callback( "#^.+?\s(?=namespace)#s", function( $m ) use ( $application )
{
$line = $m[ 0 ];
//author
$author = "<a href='" . $application->website . "'>" . $application->author . "</a>";
$line = preg_replace( '#@author([^\n]+)?#', "@author {$author}", $line );
//copyright
$copyright = "(c) " . ( new \DateTime )->format( "Y" ) . " " . $application->author;
$line = preg_replace( '#@copyright([^\n]+)?#', "@copyright {$copyright}", $line );
$line = preg_replace( '#@version([^\n]+)?#', "@version {$application->version}", $line );
return $line;
}, $line );
return $line;
}
    /**
     * Returns the "since" version; override this in an extension only if you want the @since version to change
     **/
public function since( $application )
{
return $application->version;
}
}<file_sep>/modules/front/bitbucket/index.php
<?php
/**
* @brief Index Class
* @author -storm_author-
* @copyright -storm_copyright-
* @package IPS Social Suite
* @subpackage Storm
* @since 1.0.1
* @version -storm_version-
*/
namespace IPS\storm\modules\front\bitbucket;
/* To prevent PHP errors (extending class does not exist) revealing path */
if( !defined( '\IPS\SUITE_UNIQUE_KEY' ) )
{
header( ( isset( $_SERVER[ 'SERVER_PROTOCOL' ] ) ? $_SERVER[ 'SERVER_PROTOCOL' ] : 'HTTP/1.0' ) . ' 403 Forbidden' );
exit;
}
/**
* index
*/
class _index extends \IPS\Dispatcher\Controller
{
/**
* Execute
*
* @return void
*/
public function execute()
{
parent::execute();
}
/**
* ...
*
* @return void
*/
protected function manage()
{
\IPS\storm\Bitbucket::createNewBitbucket();
\IPS\Output::i()->output = '';
}
// Create new methods with the same name as the 'do' parameter which should execute it
}<file_sep>/sources/Sync/Sync.php
<?php
/**
* @brief Sync Node
* @author -storm_author-
* @copyright -storm_copyright-
* @package IPS Social Suite
* @subpackage Storm
* @since -storm_since_version-
* @version -storm_version-
*/
namespace IPS\storm;
if( !defined( '\IPS\SUITE_UNIQUE_KEY' ) ) {
header( ( isset( $_SERVER[ 'SERVER_PROTOCOL' ] ) ? $_SERVER[ 'SERVER_PROTOCOL' ] :
'HTTP/1.0' ) . ' 403 Forbidden' );
exit;
}
class _Sync extends \IPS\Node\Model
{
/**
* @brief [ActiveRecord] Multiton Store
*/
protected static $multitons;
/**
* @brief [ActiveRecord] Default Values
*/
protected static $defaultValues = null;
/**
* @brief [ActiveRecord] Database Table
*/
public static $databaseTable = 'storm_sync';
/**
* @brief [ActiveRecord] Database Prefix
*/
public static $databasePrefix = 'sync_';
/**
* @brief [ActiveRecord] ID Database Column
*/
public static $databaseColumnId = 'id';
/**
* @brief [ActiveRecord] Database ID Fields
*/
protected static $databaseIdFields = [ 'sync_id' ];
/**
* @brief [Node] Order Database Column
*/
public static $databaseColumnOrder = null;
/**
* @brief [Node] Parent ID Database Column
*/
public static $databaseColumnParent = null;
/**
* @brief [Node] Parent ID Root Value
* @note This normally doesn't need changing though some legacy areas use -1 to indicate a root node
*/
public static $databaseColumnParentRootValue = 0;
/**
* @brief [Node] Enabled/Disabled Column
*/
public static $databaseColumnEnabledDisabled = null;
/**
* @brief [Node] Show forms modally?
*/
public static $modalForms = false;
/**
* @brief [Node] Node Title
*/
public static $nodeTitle = 'Sync';
/**
* @brief [Node] ACP Restrictions
* @code
     * array(
     * 'app' => 'core', // The application key which holds the restrictions
* 'module' => 'foo', // The module key which holds the restrictions
* 'map' => array( // [Optional] The key for each restriction - can alternatively use
* "prefix"
* 'add' => 'foo_add',
* 'edit' => 'foo_edit',
* 'permissions' => 'foo_perms',
* 'delete' => 'foo_delete'
* ),
* 'all' => 'foo_manage', // [Optional] The key to use for any restriction not provided in the map
* (only needed if not providing all 4)
* 'prefix' => 'foo_', // [Optional] Rather than specifying each key in the map, you can specify
* a prefix, and it will automatically look for restrictions with the key "[prefix]_add/edit/permissions/delete"
     * @endcode
*/
protected static $restrictions = [
'app' => 'storm',
'module' => 'sync',
'prefix' => 'sync_',
];
/**
* @brief Bitwise values for members_bitoptions field
*/
public static $bitOptions = [
'bitoptions' => [
'bitoptions' => [],
],
];
/**
* @brief [Node] Title search prefix. If specified, searches for '_title' will be done against the language
* pack.
*/
public static $titleSearchPrefix = null;
/**
* @brief [Node] Title prefix. If specified, will look for a language key with "{$titleLangPrefix}_{$id}" as
* the key
*/
public static $titleLangPrefix = null;
/**
* @brief [Node] Prefix string that is automatically prepended to permission matrix language strings
*/
public static $permissionLangPrefix = '';
/**
* @brief [Node] Moderator Permission
*/
public static $modPerm = '';
/**
* @brief Follow Area Key
*/
public static $followArea = '';
/**
* @brief Cached URL
*/
protected $_url = null;
/**
* @brief URL Base
*/
public static $urlBase = '';
/**
* @brief URL Base
*/
public static $urlTemplate = '';
/**
* @brief SEO Title Column
*/
public static $seoTitleColumn = null;
/**
* @brief Content Item Class
*/
public static $contentItemClass = null;
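    /**
     * Receive a pushed application build from a remote install.
     *
     * Roughly, the remote side requests the sync interface with a key and app, for example
     * (illustrative URL; the key is sha1( base_url . board_start ) as computed below):
     * @code
     * // https://example.com/applications/storm/interface/sync/sync.php?key={hash}&app=storm
     * @endcode
     */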
public static function recieve()
{
$conf = \IPS\Settings::i();
$keyHash = sha1( $conf->getFromConfGlobal( 'base_url' ) . $conf->getFromConfGlobal( 'board_start' ) );
$key = \IPS\Request::i()->key;
$app = \IPS\Request::i()->app;
if( $key === $keyHash ) {
$app = \IPS\Application::load( $app );
$ftp = $conf->storm_ftp_path;
$path = $ftp . '/' . $app->directory . '.tar';
if( file_exists( $path ) ) {
/* Test the phar */
$application = new \PharData( $path, 0, null, \Phar::TAR );
/* Get app directory */
$appdata = json_decode( file_get_contents( "phar://" . $path . '/data/application.json' ), true );
$appDirectory = $appdata[ 'app_directory' ];
/* Extract */
$application->extractTo( \IPS\ROOT_PATH . "/applications/" . $appDirectory, null, true );
static::_checkChmod( \IPS\ROOT_PATH . '/applications/' . $appDirectory );
unset( $appdata[ 'app_directory' ], $appdata[ 'app_protected' ], $appdata[ 'application_title' ] );
foreach( $appdata as $column => $value ) {
$column = preg_replace( "/^app_/", "", $column );
$app->$column = $value;
}
$app->save();
/* Determine our current version and the last version we ran */
$currentVersion = $app->long_version;
$allVersions = $app->getAllVersions();
$longVersions = array_keys( $allVersions );
$humanVersions = array_values( $allVersions );
$lastRan = $currentVersion;
if( count( $allVersions ) ) {
$latestLVersion = array_pop( $longVersions );
$latestHVersion = array_pop( $humanVersions );
\IPS\Db::i()->insert( 'core_upgrade_history', [
'upgrade_version_human' => $latestHVersion,
'upgrade_version_id' => $latestLVersion,
'upgrade_date' => time(),
'upgrade_mid' => (int)\IPS\Member::loggedIn()->member_id,
'upgrade_app' => $app->directory,
] );
}
/* Now find any upgrade paths since the last one we ran that need to be executed */
$upgradeSteps = $app->getUpgradeSteps( $lastRan );
/* Did we find any? */
if( count( $upgradeSteps ) ) {
$_next = array_shift( $upgradeSteps );
$app->installDatabaseUpdates( $_next );
foreach( $upgradeSteps as $up ) {
/* Get the object */
$_className = "\\IPS\\{$$app->directory}\\setup\\upg_{$up}\\Upgrade";
$_methodName = "step1";
if( class_exists( $_className ) ) {
$upgrader = new $_className;
/* If the next step exists, run it */
if( method_exists( $upgrader, $_methodName ) ) {
$result = $upgrader->$_methodName();
/* If the result is 'true' we move on to the next step, otherwise we need to run the same step again and store the data returned */
if( $result === true ) {
$ranges = range( 2, 1000 );
foreach( $ranges as $range ) {
$next = 'step' . $range;
if( method_exists( $upgrader, $next ) ) {
$result = $upgrader->{$next}();
if( $result !== true ) {
break;
}
}
}
}
}
}
}
}
$app->installJsonData();
$app->installLanguages();
$app->installEmailTemplates();
$app->installSkins( true );
$app->installJavascript();
}
}
}
protected static function _checkChmod( $directory )
{
if( !is_dir( $directory ) ) {
throw new \UnexpectedValueException;
}
$it = new \RecursiveDirectoryIterator( $directory, \FilesystemIterator::SKIP_DOTS );
foreach( new \RecursiveIteratorIterator( $it ) AS $f ) {
if( $f->isDir() ) {
@chmod( $f->getPathname(), \IPS\IPS_FOLDER_PERMISSION );
} else {
@chmod( $f->getPathname(), \IPS\IPS_FILE_PERMISSION );
}
}
}
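    /**
     * Build and push each configured application to its remote install.
     *
     * For every node this bumps the version, builds a .tar via \PharData, uploads it over
     * FTP or SFTP, then trigger() asks the remote sync interface to install it. The cron
     * task at applications/storm/interface/sync/task.php calls this method.
     */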
public static function send()
{
set_time_limit( 0 );
$trigger = [];
foreach( static::roots() as $site ) {
$trigger[] = [
'key' => $site->key,
'app' => $site->app,
'url' => $site->interface_host,
];
if( $site->ssh ) {
$ftp = new \IPS\Ftp\Sftp(
$site->host,
$site->username,
$site->pass,
$site->port ?: 22
);
} else {
$ftp = new \IPS\Ftp(
$site->host,
$site->username,
$site->pass,
$site->port ?: 21,
$site->secure,
$site->timeout
);
}
$application = \IPS\Application::load( $site->app );
$long = $application->long_version;
$human = $application->version;
$long++;
$human++;
$application->assignNewVersion( $long, $human );
try {
$application->build();
} catch( \Exception $e ) {
\IPS\Log::debug( $e );
throw $e;
}
try {
$pharPath = str_replace( '\\', '/', rtrim( \IPS\TEMP_DIRECTORY, '/' ) ) . '/' . $application->directory . ".tar";
$download = new \PharData( $pharPath, 0, $application->directory . ".tar", \Phar::TAR );
$download->buildFromIterator( new \IPS\Application\BuilderIterator( $application ) );
} catch( \PharException $e ) {
\IPS\Log::debug( $e );
throw $e;
}
$file = rtrim( \IPS\TEMP_DIRECTORY, '/' ) . '/' . $application->directory . ".tar";
$ftp->upload( $application->directory . ".tar", $file );
/* Cleanup */
unset( $download );
\Phar::unlinkArchive( $pharPath );
}
static::trigger( $trigger );
}
protected static function trigger( array $triggers )
{
if( is_array( $triggers ) and count( $triggers ) ) {
foreach( $triggers as $trigger ) {
$url = \IPS\Http\Url::external( $trigger[ 'url' ] );
$url->setQueryString( [ 'key' => $trigger[ 'key' ], 'app' => $trigger[ 'app' ] ] )->request( 2 )->get();
}
}
}
public function get__title()
{
$name = $this->host . ' ' . \IPS\Application::load( $this->app )->_title;
return $name;
}
/**
* [Node] Add/Edit Form
*
* @param \IPS\Helpers\Form $form The form
*
* @return void
*/
public function form( &$form )
{
$el[ 'prefix' ] = 'storm_ftp_';
$el[] = [
'name' => 'key',
'require' => true,
];
$el[] = [
'name' => 'interface_host',
'require' => true,
];
$el[] = [
'name' => 'app',
'class' => 'node',
'options' => [
'class' => 'IPS\Application',
'subnodes' => false,
],
'require' => true,
];
$el[] = [
'name' => 'host',
'require' => true,
];
$el[] = [
'name' => 'username',
];
$el[] = [
'name' => 'pass',
'class' => 'password',
'require' => true,
];
$el[] = [
'name' => 'port',
'class' => '#',
'default' => $this->port ?: 21,
];
$el[] = [
'name' => 'timeout',
'class' => '#',
'default' => $this->timeout ?: 10,
];
$el[] = [
'name' => 'secure',
'class' => 'yn',
];
$el[] = [
'name' => 'ssh',
'class' => 'yn',
];
$form = \IPS\storm\Forms::i( $el, $this, 'default', $form );
}
public static function topElements()
{
$conf = \IPS\Settings::i();
$key = sha1( $conf->getFromConfGlobal( 'base_url' ) . $conf->getFromConfGlobal( 'board_start' ) );
$e[] = [
'type' => 'dummy',
'name' => 'storm_remote_key_use',
'desc' => 'storm_remote_key_use_desc',
'default' => $key,
];
$e[] = [
'type' => 'dummy',
'name' => 'storm_remote_url',
'desc' => 'storm_remote_url_desc',
'default' => \IPS\Settings::i()->base_url . 'applications/storm/interface/sync/sync.php',
];
$e[] = [
'type' => 'dummy',
'name' => 'storm_cron_task',
'default' => '<strong>' . PHP_BINDIR . '/php -d memory_limit=-1 -d max_execution_time=0 ' . \IPS\ROOT_PATH . '/applications/storm/interface/sync/task.php' . '</strong>',
'desc' => 'storm_cron_task_desc',
];
$e[] = [
'name' => 'storm_ftp_path',
];
$form = \IPS\storm\Forms::i( $e, $conf );
if( $vals = $form->values() ) {
$form->saveAsSettings( $vals );
\IPS\Output::i()->redirect( \IPS\Http\Url::internal( 'app=storm&module=configuration&controller=sync' ) );
}
return "<div class='ipsPad'>" . $form . "</div>";
}
/**
* [Node] Format form values from add/edit form for save
*
* @param array $values Values from the form
*
* @return array
*/
public function formatFormValues( $values )
{
if( $values[ 'storm_ftp_app' ] instanceof \IPS\Application ) {
$values[ 'storm_ftp_app' ] = $values[ 'storm_ftp_app' ]->directory;
}
$new = [];
foreach( $values as $key => $val ) {
$key = str_replace( 'storm_ftp_', '', $key );
$new[ $key ] = $val;
}
return $new;
}
/**
* [Node] Save Add/Edit Form
*
* @param array $values Values from the form
*
* @return void
*/
public function saveForm( $values )
{
parent::saveForm( $values );
}
}<file_sep>/hooks/themeCoreGlobalGlobal.php
//<?php
/* To prevent PHP errors (extending class does not exist) revealing path */
if ( !defined( '\IPS\SUITE_UNIQUE_KEY' ) ) {
exit;
}
class storm_hook_themeCoreGlobalGlobal extends _HOOK_CLASS_
{
public static function hookData()
{
return parent::hookData();
}
public function includeCSS()
{
$parent = parent::includeCSS();
if ( \IPS\Settings::i()->storm_settings_tab_debug_css_alt ) {
foreach ( \IPS\Output::i()->cssFiles as $key => $val ) {
if ( mb_strpos( $val, 'query_log.css' ) !== false ) {
unset( \IPS\Output::i()->cssFiles[ $key ] );
}
}
$url = \IPS\storm\Settings::buildCss( \IPS\Output::i()->cssFiles );
return \IPS\Theme::i()->getTemplate( 'css', 'storm', 'front' )->css( $url );
}
else if ( defined( 'CJ_STORM_PROFILER_SAFE_MODE' ) and CJ_STORM_PROFILER_SAFE_MODE ) {
foreach ( \IPS\Output::i()->cssFiles as $key => $val ) {
if ( mb_strpos( $val, 'query_log.css' ) !== false ) {
unset( \IPS\Output::i()->cssFiles[ $key ] );
}
}
}
return $parent;
}
}
<file_sep>/modules/admin/configuration/sync.php
<?php
/**
* @brief Sync Class
* @author -storm_author-
* @copyright -storm_copyright-
* @package IPS Social Suite
* @subpackage Storm
* @since 3.0.7
* @version -storm_version-
*/
namespace IPS\storm\modules\admin\configuration;
/* To prevent PHP errors (extending class does not exist) revealing path */
if ( !defined( '\IPS\SUITE_UNIQUE_KEY' ) )
{
header( ( isset( $_SERVER['SERVER_PROTOCOL'] ) ? $_SERVER['SERVER_PROTOCOL'] : 'HTTP/1.0' ) . ' 403 Forbidden' );
exit;
}
/**
* sync
*/
class _sync extends \IPS\Node\Controller
{
/**
* Node Class
*/
protected $nodeClass = '\IPS\storm\Sync';
/**
* Execute
*
* @return void
*/
public function execute()
{
\IPS\Dispatcher::i()->checkAcpPermission( 'sync_manage' );
parent::execute();
}
public function manage(){
$class = $this->nodeClass;
$top = $class::topElements();
\IPS\Output::i()->sidebar[ 'actions' ][ 'sync' ] = [
'icon' => 'refresh',
'title' => 'Sync',
'link' => \IPS\Http\Url::internal( 'app=storm&module=configuration&controller=sync&do=sync' ),
];
\IPS\Output::i()->output .= $top;
parent::manage();
}
}<file_sep>/hooks/coreModAdminAppDev.php
//<?php
/* To prevent PHP errors (extending class does not exist) revealing path */
if ( !defined( '\IPS\SUITE_UNIQUE_KEY' ) ) {
exit;
}
class storm_hook_coreModAdminAppDev extends _HOOK_CLASS_
{
public function execute( $command = 'do' )
{
\IPS\Output::i()->jsVars[ 'storm_table_url' ] = (string) \IPS\Http\Url::internal( 'app=storm&module=configuration&controller=settings' );
parent::execute( $command );
}
public function addVersionQuery()
{
\IPS\Output::i()->jsFiles = array_merge(
\IPS\Output::i()->jsFiles,
\IPS\Output::i()->js(
'admin_query.js',
'storm',
'admin'
)
);
$tables = \IPS\Db::i()->query( "SHOW TABLES" );
$t = [];
$t[ 0 ] = "Select Table";
foreach ( $tables as $table ) {
$foo = array_values( $table );
$t[ $foo[ 0 ] ] = $foo[ 0 ];
}
$el[ 'prefix' ] = 'storm_query_';
$el[] = [
'name' => "select",
'class' => "Select",
'required' => true,
'ops' => [
'options' => [
0 => "Select One",
"addColumn" => "Add Column",
// "changeColumn" => "Change Column",
"dropColumn" => "Drop Column",
"code" => "Code Box",
],
'toggles' => [
'code' => [
'code',
],
'dropColumn' => [
'table',
'columns',
],
'addColumn' => [
'table',
'add_column',
'type',
'length',
'decimals',
'default',
'comment',
'allow_null',
'unsigned',
'zerofill',
'auto_increment',
                        'binary',
'values',
],
// 'changeColumn' => [
// 'table',
// 'columns'
// ],
],
],
];
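        /*
         * Illustrative value this validator accepts (the column name is an example only):
         *
         *   \IPS\Db::i()->addColumn( 'core_members', array( 'name' => 'foo', 'type' => 'INT' ) );
         *
         * The value must start with \IPS\Db::i()->, contain a single statement, and match the
         * regex check below; when added to the "working" version it is also eval'd and executed.
         */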
$val = function ( $val ) {
/* Check it starts with \IPS\Db::i()-> */
$val = trim( $val );
if ( mb_substr( $val, 0, 14 ) !== '\IPS\Db::i()->' ) {
throw new \DomainException( 'versions_query_start' );
}
/* Check there's only one query */
if ( mb_substr( $val, -1 ) !== ';' ) {
$val .= ';';
}
if ( mb_substr_count( $val, ';' ) > 1 ) {
throw new \DomainException( 'versions_query_one' );
}
/* Check our Regex will be okay with it */
preg_match( '/^\\\IPS\\\Db::i\(\)->(.+?)\(\s*[\'"](.+?)[\'"]\s*(,\s*(.+?))?\)\s*;$/', $val, $matches );
if ( empty( $matches ) ) {
throw new \DomainException( 'versions_query_format' );
}
/* Run it if we're adding it to the current working version */
if ( \IPS\Request::i()->id == 'working' ) {
try {
try {
if ( @eval( $val ) === false ) {
throw new \DomainException( 'versions_query_phperror' );
}
} catch ( \ParseError $e ) {
throw new \DomainException( 'versions_query_phperror' );
}
} catch ( \IPS\Db\Exception $e ) {
throw new \DomainException( $e->getMessage() );
}
}
};
$el[] = [
'name' => 'code',
'class' => "TextArea",
'default' => '\IPS\Db::i()->',
'required' => true,
'v' => $val,
'ops' => [
'size' => 45,
],
];
if ( !isset( \IPS\Request::i()->storm_query_code ) or \IPS\Request::i()->storm_query_code != 'code' ) {
$el[] = [
'name' => "table",
'class' => "Select",
'required' => true,
'ops' => [
'options' => $t,
'parse' => 'raw',
],
];
$el[] = [
'name' => "columns",
'customClass' => "\\IPS\\storm\\Forms\\Select",
'ops' => [
'options' => [
],
],
];
$ints = [
'add_column',
'length',
'allow_null',
'default',
'comment',
'sunsigned',
'zerofill',
'auto_increment',
];
$decfloat = [
'add_column',
'length',
'decimals',
'allow_null',
'default',
'comment',
'sunsigned',
'zerofill',
];
$dates = [
'add_column',
'allow_null',
'default',
'comment',
];
$char = [
'add_column',
'length',
'allow_null',
'default',
'comment',
'binary',
];
$text = [
'add_column',
'allow_null',
'comment',
'binary',
];
$binary = [
'add_column',
'length',
'allow_null',
'default',
'comment',
];
$blob = [
'add_column',
'allow_null',
'comment',
];
$enum = [
'add_column',
'values',
'allow_null',
'default',
'comment',
];
$el[] = [
'class' => "Select",
'name' => "type",
'ops' => [
'options' => \IPS\Db::$dataTypes,
'toggles' => [
'TINYINT' => $ints,
'SMALLINT' => $ints,
'MEDIUMINT' => $ints,
'INT' => $ints,
'BIGINT' => $ints,
'DECIMAL' => $decfloat,
'FLOAT' => $decfloat,
'BIT' => [
'columns',
'length',
'allow_null',
'default',
'comment',
],
'DATE' => $dates,
'DATETIME' => $dates,
'TIMESTAMP' => $dates,
'TIME' => $dates,
'YEAR' => $dates,
'CHAR' => $char,
'VARCHAR' => $char,
'TINYTEXT' => $text,
'TEXT' => $text,
'MEDIUMTEXT' => $text,
'LONGTEXT' => $text,
'BINARY' => $binary,
'VARBINARY' => $binary,
'TINYBLOB' => $blob,
'BLOB' => $blob,
'MEDIUMBLOB' => $blob,
                        'LONGBLOB' => $blob,
'ENUM' => $enum,
'SET' => $enum,
],
],
];
$el[] = [
'name' => "add_column",
'required' => true,
'class' => "Text",
];
$el[] = [
'name' => 'values',
'class' => 'Stack',
];
$el[] = [
'name' => "length",
'class' => "Number",
'default' => 255,
];
$el[] = [
'name' => "allow_null",
'class' => "YesNo",
];
$el[] = [
'name' => 'decimals',
'class' => 'Number',
];
$el[] = [
'name' => "default",
'class' => "TextArea",
];
$el[] = [
'name' => "comment",
'class' => "TextArea",
];
$el[] = [
'name' => "sunsigned",
'class' => "YesNo",
];
$el[] = [
'name' => "zerofill",
'class' => "YesNo",
];
$el[] = [
'name' => "auto_increment",
'class' => "YesNo",
];
$el[] = [
'name' => "binary",
'class' => "YesNo",
];
}
$forms = \IPS\storm\Forms::i( $el, null, 'add_version_query', null, null, 'save', null,
[ 'data-controller' => 'storm.admin.query.query' ] );
/* If submitted, add to json file */
if ( $vals = $forms->values() ) {
/* Get our file */
$version = \IPS\Request::i()->id;
$json = $this->_getQueries( $version );
$install = $this->_getQueries( 'install' );
if ( $vals[ 'storm_query_select' ] != 'code' ) {
$type = $vals[ 'storm_query_select' ];
$table = $vals[ 'storm_query_table' ];
if ( $type == 'dropColumn' ) {
$column = $vals[ 'storm_query_columns' ];
$json[] = [ 'method' => $type, 'params' => [ $table, $column ] ];
\IPS\Db::i()->dropColumn( $table, $column );
}
else {
$column = $vals[ 'storm_query_add_column' ];
$schema = [];
$schema[ 'name' ] = $vals[ 'storm_query_add_column' ];
$schema[ 'type' ] = $vals[ 'storm_query_type' ];
if ( isset( $vals[ 'storm_query_length' ] ) and $vals[ 'storm_query_length' ] ) {
$schema[ 'length' ] = $vals[ 'storm_query_length' ];
}
else {
$schema[ 'length' ] = null;
}
if ( isset( $vals[ 'storm_query_decimals' ] ) and $vals[ 'storm_query_decimals' ] ) {
$schema[ 'decimals' ] = $vals[ 'storm_query_decimals' ];
}
else {
$schema[ 'decimals' ] = null;
}
if ( isset( $vals[ 'storm_query_values' ] ) and \count( $vals[ 'storm_query_values' ] ) ) {
$schema[ 'values' ] = $vals[ 'storm_query_values' ];
}
else {
$schema[ 'values' ] = null;
}
if ( isset( $vals[ 'storm_query_allow_null' ] ) and $vals[ 'storm_query_allow_null' ] ) {
$schema[ 'allow_null' ] = true;
}
else {
$schema[ 'allow_null' ] = false;
}
if ( isset( $vals[ 'storm_query_default' ] ) and $vals[ 'storm_query_default' ] ) {
$schema[ 'default' ] = $vals[ 'storm_query_default' ];
}
else {
$schema[ 'default' ] = null;
}
if ( isset( $vals[ 'storm_query_comment' ] ) and $vals[ 'storm_query_comment' ] ) {
$schema[ 'comment' ] = $vals[ 'storm_query_comment' ];
}
else {
$schema[ 'comment' ] = '';
}
if ( isset( $vals[ 'storm_query_sunsigned' ] ) and $vals[ 'storm_query_sunsigned' ] ) {
$schema[ 'unsigned' ] = $vals[ 'storm_query_sunsigned' ];
}
else {
$schema[ 'unsigned' ] = false;
}
if ( isset( $vals[ 'storm_query_zerofill' ] ) and $vals[ 'storm_query_zerofill' ] ) {
$schema[ 'zerofill' ] = $vals[ 'storm_query_zerofill' ];
}
else {
$schema[ 'zerofill' ] = false;
}
if ( isset( $vals[ 'storm_query_auto_increment' ] ) and $vals[ 'storm_query_auto_increment' ] ) {
$schema[ 'auto_increment' ] = $vals[ 'storm_query_auto_increment' ];
}
else {
$schema[ 'auto_increment' ] = false;
}
if ( isset( $vals[ 'storm_query_binary' ] ) and $vals[ 'storm_query_binary' ] ) {
                    $schema[ 'binary' ] = $vals[ 'storm_query_binary' ];
}
else {
$schema[ 'binary' ] = false;
}
if ( $type == 'addColumn' ) {
$json[] = [ 'method' => $type, 'params' => [ $table, $schema ] ];
$install[] = [ 'method' => $type, 'params' => [ $table, $schema ] ];
$this->_writeQueries( 'install', $install );
\IPS\Db::i()->addColumn( $table, $schema );
}
else if ( $type == 'changeColumn' ) {
$json[] = [ 'method' => $type, 'params' => [ $table, $column, $schema ] ];
\IPS\Db::i()->changeColumn( $table, $column, $schema );
}
}
}
else {
/* Work out the different parts of the query */
$val = trim( $vals[ 'storm_query_code' ] );
if ( mb_substr( $val, -1 ) !== ';' ) {
$val .= ';';
}
preg_match( '/^\\\IPS\\\Db::i\(\)->(.+?)\(\s*(.+?)\s*\)\s*;$/', $val, $matches );
/* Add it on */
$json[] = [
'method' => $matches[ 1 ],
'params' => eval( 'return array( ' . $matches[ 2 ] . ' );' ),
];
}
/* Write it */
$this->_writeQueries( $version, $json );
/* Redirect us */
\IPS\Output::i()
->redirect( \IPS\Http\Url::internal( "app=core&module=applications&controller=developer&appKey={$this->application->directory}&tab=versions&root={$version}" ) );
}
\IPS\Output::i()->output = $forms;
}
protected function addTable()
{
$activeTab = \IPS\Request::i()->tab ?: 'new';
if ( $activeTab === "new" and isset( \IPS\Request::i()->storm_create_class ) and \IPS\Request::i()->storm_create_class !== "select" ) {
try {
$queriesJson = $this->_getQueries( 'working' );
$type = \IPS\Request::i()->storm_create_class;
$class = \IPS\storm\Classes::i();
$db = \IPS\Request::i()->database_table_name;
$prefix = \IPS\Request::i()->storm_class_prefix ?: '';
$data[ 'storm_class_type' ] = $type;
$data[ 'storm_class_className' ] = $db;
$data[ 'storm_class_prefix' ] = $prefix;
$data[ 'storm_class_database' ] = $this->application->directory . "_" . $db;
$data[ 'storm_class_item_node_class' ] = \IPS\Request::i()->storm_class_item_node_class ?: '';
$class->process( $data, $this->application );
if ( $prefix ) {
$prefix = $prefix . "_id";
}
else {
$prefix = "id";
}
$definition = [
'name' => $data[ 'storm_class_database' ],
'columns' => [
$prefix => [
'name' => $prefix,
'type' => 'BIGINT',
'length' => '20',
'unsigned' => true,
'zerofill' => false,
'binary' => false,
'allow_null' => false,
'default' => null,
'auto_increment' => true,
'comment' => \IPS\Member::loggedIn()->language()->get( 'database_default_column_comment' ),
],
],
'indexes' => [
'PRIMARY' => [
'type' => 'primary',
'name' => 'PRIMARY',
'columns' => [ $prefix ],
'length' => [ null ],
],
],
];
/* Create table */
\IPS\Db::i()->createTable( $definition );
/* Add to the queries.json file */
$queriesJson = $this->_addQueryToJson( $queriesJson,
[ 'method' => 'createTable', 'params' => [ $definition ] ] );
$this->_writeQueries( 'working', $queriesJson );
/* Add to schema.json */
$schema = $this->_getSchema();
$schema[ $definition[ 'name' ] ] = $definition;
$this->_writeSchema( $schema );
/* Redirect */
\IPS\Output::i()
->redirect( \IPS\Http\Url::internal( "app=core&module=applications&controller=developer&appKey={$this->application->directory}&do=editSchema&_name={$definition['name']}" ) );
} catch ( \Exception $e ) {
}
}
parent::addTable();
$form = new \IPS\Helpers\Form;
$options = [
'options' => [
"select" => "Select Class Type",
"ar" => "ActiveRecord",
"model" => "Node",
"item" => "Content Item",
"comment" => "Content Item Comment",
],
'toggles' => [
'ar' => [ 'js_storm_class_prefix' ],
'model' => [ 'js_storm_class_prefix' ],
'item' => [ 'js_storm_class_prefix', 'js_storm_class_item_node_class' ],
'comment' => [ 'js_storm_class_prefix', 'js_storm_class_item_node_class' ],
],
];
$select = new \IPS\Helpers\Form\Select( 'storm_create_class', null, false, $options, '', '', '', '' );
$prefix = new \IPS\Helpers\Form\Text( 'storm_class_prefix', null, false, [], '', '', '_',
'js_storm_class_prefix' );
$nodeItemClass = new \IPS\Helpers\Form\Text( 'storm_class_item_node_class', null, false, [], '', '', '',
'js_storm_class_item_node_class' );
$output = \IPS\Output::i()->output;
$add = $select->rowHtml( $form ) . $prefix->rowHtml( $form ) . $nodeItemClass->rowHtml( $form );
$output = preg_replace( '#<li class=[\'|"](.+?)[\'|"] id=[\'|"]database_table_new_database_table_name[\'|"]>#mu',
$add . '<li class="$1" id="database_table_new_database_table_name">', $output );
\IPS\Output::i()->output = $output;
}
protected function _manageClass()
{
return \IPS\storm\Classes::i()->form();
}
protected function _manageDevFolder()
{
return \IPS\storm\Classes\DevFolder::i()->form();
}
protected function _writeJson( $file, $data )
{
if( $file == \IPS\ROOT_PATH . "/applications/{$this->application->directory}/data/settings.json" ){
\IPS\storm\Proxyclass::i()->generateSettings();
}
parent::_writeJson( $file, $data );
}
}
<file_sep>/sources/Settings/Settings.php
<?php
/**
* @brief Settings Class
* @author -storm_author-
* @copyright -storm_copyright-
* @package IPS Social Suite
* @subpackage Storm
* @since 1.0.0
* @version -storm_version-
*/
namespace IPS\storm;
if( !defined( '\IPS\SUITE_UNIQUE_KEY' ) )
{
header( ( isset( $_SERVER[ 'SERVER_PROTOCOL' ] ) ? $_SERVER[ 'SERVER_PROTOCOL' ] : 'HTTP/1.0' ) . ' 403 Forbidden' );
exit;
}
class _Settings extends \IPS\Patterns\Singleton
{
protected static $instance = null;
public static function mbUcfirst( $string )
{
return mb_strtoupper( mb_substr( $string, 0, 1 ) ) . mb_substr( $string, 1 );
}
public static function buildCss( $css )
{
if( \IPS\Data\Store::i()->exists( 'dev_css' ) )
{
\IPS\Data\Store::i()->delete( 'dev_css' );
}
$url = null;
$files = [];
foreach( $css as $c )
{
$data = \IPS\Http\Url::external( $c );
$files[] = $data->queryString[ 'css' ];
}
\IPS\Data\Store::i()->dev_css = $files;
return str_replace( [ 'http://', 'https://' ], '//',
\IPS\Settings::i()->base_url ) . "applications/storm/interface/css/css.php";
}
public static function form()
{
$s = \IPS\Settings::i();
$form = \IPS\storm\Forms::i( static::elements(), $s );
if( $vals = $form->values() )
{
$form->saveAsSettings( $vals );
\IPS\Output::i()->redirect( \IPS\Http\Url::internal( 'app=storm&module=configuration&controller=settings' ) );
}
return $form;
}
protected static function elements()
{
$e[] = [
'class' => "YesNo",
'name' => "storm_settings_tab_debug_templates",
'tab' => 'general'
];
$e[] = [
'class' => "YesNo",
'name' => "storm_settings_tab_debug_css"
];
$e[] = [
'class' => "YesNo",
'name' => "storm_settings_tab_debug_css_alt"
];
$e[] = [
'class' => "YesNo",
'name' => 'storm_profiler_is_fixed'
];
$e[] = [
'class' => 'YesNo',
'name' => 'storm_settings_disable_menu'
];
return $e;
}
}
<file_sep>/modules/front/general/general.php
<?php
/**
* @brief General Class
* @author -storm_author-
* @copyright -storm_copyright-
* @package IPS Social Suite
* @subpackage Storm
* @since 2.0.0
* @version -storm_version-
*/
namespace IPS\storm\modules\front\general;
/* To prevent PHP errors (extending class does not exist) revealing path */
if( !defined( '\IPS\SUITE_UNIQUE_KEY' ) )
{
header( ( isset( $_SERVER[ 'SERVER_PROTOCOL' ] ) ? $_SERVER[ 'SERVER_PROTOCOL' ] : 'HTTP/1.0' ) . ' 403 Forbidden' );
exit;
}
/**
* general
*/
class _general extends \IPS\Dispatcher\Controller
{
/**
* Execute
*
* @return void
*/
public function execute()
{
parent::execute();
}
/**
* ...
*
* @return void
*/
protected function manage()
{
// This is the default method if no 'do' parameter is specified
}
// Create new methods with the same name as the 'do' parameter which should execute it
protected function backtrace()
{
$bt = \IPS\Request::i()->id;
$back = [];
if( \IPS\Data\Store::i()->exists( 'storm_bt' ) )
{
$back = \IPS\Data\Store::i()->storm_bt;
}
$output = "Nothing found";
if( isset( $back[ $bt ] ) )
{
$bt = $back[ $bt ];
$bt[ 'backtrace' ] = str_replace( "\\\\", "\\", $bt[ 'backtrace' ] );
$output = "<code>" . $bt[ 'query' ] . "</code><br><pre class=\"prettyprint lang-php \">" . $bt[ 'backtrace' ] . "</pre>";
}
\IPS\Output::i()->output = "<div class='ipsPad'>{$output}</div>";
}
protected function cache()
{
$bt = \IPS\Request::i()->id;
$back = [];
if( \IPS\Data\Store::i()->exists( 'storm_cache' ) )
{
$back = \IPS\Data\Store::i()->storm_cache;
}
$output = "Nothing found";
if( isset( $back[ $bt ] ) )
{
$bt = $back[ $bt ];
$bt[ 'backtrace' ] = str_replace( "\\\\", "\\", $bt[ 'backtrace' ] );
$output = "<div>Type: " . $bt[ 'type' ] . "</div><div>Key: " . $bt[ 'key' ] . "</div><br><pre class='prettyprint lang-php'>" . $bt[ 'backtrace' ] . "</pre>";
}
\IPS\Output::i()->output = "<div class='ipsPad'>{$output}</div>";
}
}
<file_sep>/hooks/multipleRedirect.php
//<?php
/* To prevent PHP errors (extending class does not exist) revealing path */
if ( !defined( '\IPS\SUITE_UNIQUE_KEY' ) ) {
exit;
}
class storm_hook_multipleRedirect extends _HOOK_CLASS_
{
public function __construct( $url, $callback, $finished, $finalRedirect = true )
{
if( isset( \IPS\Request::i()->storm ) and \IPS\Request::i()->storm ) {
$url = $url->setQueryString( [ 'storm' => \IPS\Request::i()->storm ] );
$finished = function () {
$path = 'app=storm&module=configuration&controller=plugins';
$url = \IPS\Http\Url::internal( $path )->setQueryString( [
'storm' => \IPS\Request::i()->storm,
'do' => "doDev",
]
);
\IPS\Output::i()->redirect($url);
};
}
parent::__construct( $url, $callback, $finished, $finalRedirect );
}
}
<file_sep>/interface/debug/index.php
<?php
require_once str_replace( 'applications/storm/interface/debug/index.php', '',
str_replace( '\\', '/', __FILE__ ) ) . 'init.php';
\IPS\Session\Front::i();
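/*
 * Long-poll endpoint for the debug console: it drains storm_debug rows flagged debug_ajax = 1
 * and returns them as JSON entries of { type, message, bt? }, or { end: 1 } once roughly half
 * of max_execution_time has elapsed.
 */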
//StormDebug::i()->run();
//function quit()
//{
// \IPS\Output::i()->json(['end' => 1]);
//}
//
//register_shutdown_function('quit');
$max = ( ini_get( 'max_execution_time' ) / 2 ) - 5;
$time = time();
while( true )
{
$ct = time() - $time;
if( $ct >= $max )
{
\IPS\Output::i()->json( [ 'end' => 1 ] );
}
$query = \IPS\Db::i()->select( '*', 'storm_debug', [ 'debug_ajax = ?', 1 ], 'debug_id ASC' );
if( count( $query ) )
{
$messages = new \IPS\Patterns\ActiveRecordIterator(
$query,
'IPS\storm\Debug'
);
$return = [];
foreach( $messages as $key => $val )
{
$msg = $val->dump;
$decoded = json_decode( $msg, true );
if( json_last_error() == JSON_ERROR_NONE )
{
$msg = $decoded;
}
$data = [
'type' => $val->type,
'message' => $msg
];
if( $val->bt )
{
$data[ 'bt' ] = $val->bt;
}
$return[] = $data;
$val->delete();
}
if( is_array( $return ) and count( $return ) )
{
\IPS\Output::i()->json( $return );
}
}
else
{
sleep( 1 );
continue;
}
}<file_sep>/hooks/Theme.php
//<?php
/* To prevent PHP errors (extending class does not exist) revealing path */
if ( !defined( '\IPS\SUITE_UNIQUE_KEY' ) ) {
exit;
}
class storm_hook_Theme extends _HOOK_CLASS_
{
static protected $clearDir = true;
public static function runProcessFunction( $content, $functionName )
{
if ( \IPS\Settings::i()->storm_settings_tab_debug_templates ) {
/* If it's already been built, we don't need to do it again */
if ( function_exists( 'IPS\Theme\\' . $functionName ) ) {
return;
}
/* Build Function */
$function = 'namespace IPS\Theme;' . "\n" . $content;
static::runDebugTemplate( $functionName, $function );
}
else {
parent::runProcessFunction( $content, $functionName );
}
}
protected static function runDebugTemplate( $functionName, $content )
{
if ( ( mb_strpos( $functionName, 'css_' ) === false and \IPS\Settings::i()->storm_settings_tab_debug_templates ) or ( mb_strpos( $functionName, 'css_' ) !== false and \IPS\Settings::i()->storm_settings_tab_debug_css ) ) {
$dir = \IPS\ROOT_PATH . "/StormTemplates";
if ( !is_dir( $dir ) ) {
@mkdir( $dir, 0777, true );
}
$content = "<?php\n" . $content;
$chash = md5( $content );
$fname = str_replace( "/", "_", $functionName );
$file = $dir . "/" . $fname . ".php";
$hash = false;
//$content = preg_replace( "#<!--(.*?)-->#", '', $content );
if ( file_exists( $file ) ) {
$hash = md5_file( $file );
}
$build = true;
if ( $hash ) {
if ( $hash == $chash ) {
$build = false;
}
}
if ( $build ) {
\file_put_contents( $file, $content );
}
include_once( $file );
}
else {
return parent::runDebugTemplate( $functionName, $content );
}
}
}
<file_sep>/sources/Classes/DevFolder.php
<?php
/**
* @brief DevFolder Singleton
* @author <a href='http://codingjungle.com'><NAME></a>
* @copyright (c) 2017 <NAME>
* @package IPS Social Suite
* @subpackage Storm
* @since -storm_since_version-
* @version 3.0.4
*/
namespace IPS\storm\Classes;
if( !defined( '\IPS\SUITE_UNIQUE_KEY' ) )
{
header( ( isset( $_SERVER[ 'SERVER_PROTOCOL' ] ) ? $_SERVER[ 'SERVER_PROTOCOL' ] :
'HTTP/1.0' ) . ' 403 Forbidden' );
exit;
}
class _DevFolder extends \IPS\Patterns\Singleton
{
/**
* @brief [ActiveRecord] Multiton Store
*/
protected static $instance = null;
protected $app = null;
    protected $blanks = null;
public function form()
{
$this->app = \IPS\Application::load( \IPS\Request::i()->appKey );
$this->blanks = \IPS\ROOT_PATH . "/applications/storm/sources/Classes/blanks/";
$form = \IPS\storm\Forms::i( $this->elements() );
if( $vals = $form->values() )
{
$this->process( $vals );
}
return $form;
}
protected function elements()
{
$e = [];
$app = $this->app;
$validate = function( $data )
{
if( $data == "select" )
{
throw new \InvalidArgumentException( 'storm_classes_type_no_selection' );
}
};
$e[ 'prefix' ] = 'storm_devfolder_';
$e[] = [
'class' => "Select",
'name' => "type",
'default' => "select",
'required' => true,
'validation' => $validate,
'appearRequired' => true,
'options' => [
'options' => $this->classTypes(),
'toggles' => $this->toggles(),
],
];
$e[] = [
'class' => "Select",
'name' => 'loc',
'appearRequired' => true,
'validation' => function( $data )
{
if( $data == "select" )
{
throw new \InvalidArgumentException( 'storm_devfolder_loc_error' );
}
},
'options' => [
'options' => [ 'select' => "Select", 'admin' => 'Admin', 'front' => "Front", 'global' => "Global" ]
]
];
$e[] = [
'class' => "Text",
'name' => 'group',
'appearRequired' => true,
'validation' => function( $data )
{
if( !$data )
{
throw new \InvalidArgumentException( 'storm_devfolder_group_error' );
}
}
];
$fileValidation = function( $data ) use ( $app )
{
if( !$data )
{
throw new \InvalidArgumentException( 'storm_devfolder_filename_error' );
}
$devFolder = \IPS\ROOT_PATH . "/applications/{$app->directory}/dev/";
$file = "{$data}";
$type = \IPS\Request::i()->storm_devfolder_type;
$loc = \IPS\Request::i()->storm_devfolder_loc;
$group = \IPS\Request::i()->storm_devfolder_group;
if( $type === "template" )
{
$location = '/' . $loc . '/' . $group . '/';
$dir = $devFolder . "html" . $location;
$fileName = $dir . "{$file}.phtml";
}
$js = [
'jsModule',
'jsWidget',
'jsController'
];
            if( in_array( $type, $js ) )
            {
                $location = "/" . $loc . "/controllers/" . $group . "/";
                $dir = $devFolder . "js" . $location;
                if( !file_exists( $dir ) )
                {
                    \mkdir( $dir, 0777, true );
                }
                $module = "ips.{$app->directory}.{$file}";
                $fileName = $dir . $module . ".js";
                if( $type === "jsController" )
                {
                    /* Controller files are named ips.{group}.{file}.js, matching process() below */
                    $fileName = $dir . "ips." . $group . "." . $file . ".js";
                }
            }
if( file_exists( $fileName ) )
{
throw new \InvalidArgumentException( 'storm_devfolder_filename_exist' );
}
};
$e[] = [
'class' => "Text",
'name' => 'filename',
'appearRequired' => true,
'validation' => $fileValidation
];
$e[] = [
'class' => "Text",
'name' => 'widgetname',
'appearRequired' => true,
'validation' => function( $data )
{
if( !$data )
{
throw new \InvalidArgumentException( 'storm_devfolder_widgetname_error' );
}
}
];
$e[] = [
'class' => "Stack",
'name' => "args"
];
return $e;
}
protected function classTypes()
{
return [
'select' => "Select",
'template' => "Template",
'jsWidget' => "JS Widget",
'jsModule' => "JS Module",
'jsController' => "JS Controller"
];
}
protected function toggles()
{
return [
'template' => [
'args',
'loc',
'group',
'filename'
],
'jsModule' => [
'loc',
'group',
'filename'
],
'jsWidget' => [
'loc',
'group',
'filename',
'widgetname'
],
'jsController' => [
'loc',
'group',
'filename'
],
];
}
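    /**
     * Create the requested dev file from one of the blank templates.
     *
     * Illustrative example (all names are examples only): type "template", loc "front",
     * group "mygroup" and filename "myfile" creates
     * applications/{app}/dev/html/front/mygroup/myfile.phtml from blanks/template.txt,
     * with the #args# placeholder replaced by the supplied arguments.
     */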
protected function process( $vals )
{
$app = $this->app;
$blanks = $this->blanks;
$devFolder = \IPS\ROOT_PATH . "/applications/{$app->directory}/dev/";
$file = "{$vals['storm_devfolder_filename']}";
if( $vals[ 'storm_devfolder_type' ] === "template" )
{
$location = "/{$vals['storm_devfolder_loc']}/{$vals['storm_devfolder_group']}/";
$dir = $devFolder . "html" . $location;
if( !file_exists( $dir ) )
{
\mkdir( $dir, 0777, true );
}
$fileName = $dir . "{$file}.phtml";
$args = [];
if( is_array( $vals[ 'storm_devfolder_args' ] ) )
{
foreach( $vals[ 'storm_devfolder_args' ] as $v )
{
$args[] = "$" . str_replace( "$", "", $v );
}
$args = implode( ",", $args );
}
else
{
$args = '';
}
$content = \file_get_contents( $blanks . "template.txt" );
$content = str_replace( "#args#", $args, $content );
}
$js = [
'jsModule',
'jsWidget',
'jsController'
];
if( in_array( $vals[ 'storm_devfolder_type' ], $js ) )
{
$location = "/{$vals['storm_devfolder_loc']}/controllers/{$vals['storm_devfolder_group']}/";
$dir = $devFolder . "js" . $location;
if( !file_exists( $dir ) )
{
\mkdir( $dir, 0777, true );
}
$module = "ips.{$app->directory}.{$file}";
$fileName = $dir . $module . ".js";
if( $vals[ 'storm_devfolder_type' ] === "jsModule" or $vals[ 'storm_devfolder_type' ] === "jsController" )
{
if( $vals[ 'storm_devfolder_type' ] === "jsModule" )
{
$content = \file_get_contents( $blanks . "jsModule.txt" );
}
else if( $vals[ 'storm_devfolder_type' ] === "jsController" )
{
$module = "{$app->directory}.{$vals['storm_devfolder_loc']}.{$vals['storm_devfolder_group']}.{$file}";
$fileName = $dir . "ips.{$vals['storm_devfolder_group']}.{$file}.js";
$content = \file_get_contents( $blanks . "jsController.txt" );
}
$content = str_replace( "#module#", $module, $content );
}
else if( $vals[ 'storm_devfolder_type' ] === "jsWidget" )
{
$content = \file_get_contents( $blanks . "jsWidget.txt" );
$content = str_replace( [ '#widget#', '#widgetname#' ],
[ $module, $vals[ 'storm_devfolder_widgetname' ] ], $content );
}
}
$msg = \IPS\Member::loggedIn()->language()->addToStack( 'created', false, [ 'sprintf' => [ $fileName ] ] );
\file_put_contents( $fileName, $content );
\chmod( $fileName, 0777 );
\IPS\Output::i()
->redirect( \IPS\Http\Url::internal( "app=core&module=applications&controller=developer&appKey={$app->directory}&tab=DevFolder" ),
$msg );
}
}
<file_sep>/sources/Debug/Debug.php
<?php
/**
* @brief Debug Class
* @author -storm_author-
* @copyright -storm_copyright-
* @package IPS Social Suite
* @subpackage Storm
* @since 1.0.3
* @version -storm_version-
*/
namespace IPS\storm;
if( !defined( '\IPS\SUITE_UNIQUE_KEY' ) )
{
header( ( isset( $_SERVER[ 'SERVER_PROTOCOL' ] ) ? $_SERVER[ 'SERVER_PROTOCOL' ] :
'HTTP/1.0' ) . ' 403 Forbidden' );
exit;
}
class _Debug extends \IPS\Patterns\ActiveRecord
{
/**
* @brief [ActiveRecord] Database table
*/
public static $databaseTable = 'storm_debug';
/**
* @brief [ActiveRecord] Database Prefix
*/
public static $databasePrefix = "debug_";
/**
* @brief [ActiveRecord] ID Database Column
*/
public static $databaseColumnId = 'id';
/**
* @brief [ActiveRecord] Multiton Store
*/
protected static $multitons;
/**
* @brief [ActiveRecord] Database ID Fields
*/
protected static $databaseIdFields = [ 'debug_id' ];
/**
* @brief Bitwise keys
*/
protected static $bitOptions = [];
protected static $clear = true;
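    /**
     * Illustrative usage. These helpers only act when CJ_STORM_DEBUG is defined and truthy;
     * the category/type strings below are examples only:
     * @code
     * \IPS\storm\Debug::log( 'something happened', 'myapp' );  // appends to a file under uploads/logs
     * \IPS\storm\Debug::console( [ 'foo' => 'bar' ], 'info' ); // queued for output to the browser console
     * \IPS\storm\Debug::both( 'message', 'myapp', 'log' );     // both of the above
     * @endcode
     */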
public static function both(
$message,
$logType = null,
$consoleType = "type",
$includeBackTrace = false,
$file = null,
$dir = null
) {
static::console( $message, $consoleType, $includeBackTrace );
static::log( $message, $logType, $file, $dir );
}
public static function console( $message, $type = "log", $includeBackTrace = false )
{
if( defined( 'CJ_STORM_DEBUG' ) and CJ_STORM_DEBUG )
{
if( !$message )
{
return;
}
$debug = new static;
if( is_array( $message ) )
{
$message = json_encode( $message );
}
$debug->time = time();
$debug->dump = $message;
$debug->type = $type;
if( $includeBackTrace )
{
$debug->bt = ( new \Exception )->getTraceAsString();
}
$debug->save();
}
}
public static function log( $message, $type = null, $file = null, $dir = null )
{
if( defined( 'CJ_STORM_DEBUG' ) and CJ_STORM_DEBUG )
{
if( !$message )
{
return;
}
$date = date( 'r' );
if( $message instanceof \Exception )
{
$message = $date . "\n" . get_class( $message ) . '::' . $message->getCode() . "\n" . $message->getMessage() . "\n" . $message->getTraceAsString();
}
else
{
if( is_array( $message ) )
{
$message = var_export( $message, true );
}
$message = $date . "\n" . $message . "\n" . ( new \Exception )->getTraceAsString();
}
if( $dir == null )
{
$dir = \IPS\ROOT_PATH . "/uploads/logs";
}
else
{
if( mb_strpos( $dir, \IPS\ROOT_PATH ) === false )
{
$dir = \IPS\ROOT_PATH . "/" . $dir;
}
}
if( !is_dir( $dir ) )
{
if( !@mkdir( $dir ) or !@chmod( $dir, \IPS\IPS_FOLDER_PERMISSION ) )
{
return;
}
}
if( $file == null )
{
$type = ( $type ) ? "_{$type}" : "";
$file = $dir . '/' . date( 'Y' ) . '_' . date( 'm' ) . '_' . date( 'd' ) . $type;
}
else
{
$type = ( $type ) ? "_{$type}" : "";
$file = $dir . '/' . $file . $type;
}
if( file_exists( $file ) )
{
@\file_put_contents( $file, "\n\n-------------\n\n" . $message, FILE_APPEND );
}
else
{
@\file_put_contents( $file, $message );
}
@chmod( $file, \IPS\IPS_FILE_PERMISSION );
}
}
public static function returnLog()
{
$return = [];
$return[] = "<script>";
$query = \IPS\Db::i()->select( '*', 'storm_debug', [ 'debug_ajax = ?', 0 ], 'debug_id ASC' );
if( count( $query ) )
{
$messages = new \IPS\Patterns\ActiveRecordIterator(
$query,
'IPS\storm\Debug'
);
foreach( $messages as $key => $val )
{
switch( $val->type )
{
default:
case 'log':
$return[] = "console.log('{$val->dump}');";
break;
case 'debug':
$return[] = "console.debug('{$val->dump}');";
break;
case 'dir':
$return[] = "console.dir('{$val->dump}');";
break;
case 'dirxml':
$return[] = "console.dirxml('{$val->dump}');";
break;
case 'error':
$return[] = "console.error('{$val->dump}');";
break;
case 'info':
$return[] = "console.info('{$val->dump}');";
break;
}
if( $val->bt )
{
$return[] = "console.log('{$val->bt}');";
}
$val->delete();
}
}
$return[] = "</script>";
return implode( "\n", $return );
}
public static function ajax( $msg, $type = "log", $includeBackTrace = false )
{
if( defined( 'CJ_STORM_DEBUG' ) and CJ_STORM_DEBUG )
{
if( !$msg )
{
return;
}
$debug = new static;
if( is_array( $msg ) )
{
$msg = json_encode( $msg );
}
$debug->dump = $msg;
$debug->type = $type;
$debug->ajax = 1;
$debug->time = time();
if( $includeBackTrace )
{
$debug->bt = ( new \Exception )->getTraceAsString();
}
$debug->save();
}
}
}<file_sep>/modules/admin/configuration/menu.php
<?php
/**
* @brief Menu Class
* @author -storm_author-
* @copyright -storm_copyright-
* @package IPS Social Suite
* @subpackage Storm
* @since 3.0.2
* @version -storm_version-
*/
namespace IPS\storm\modules\admin\configuration;
/* To prevent PHP errors (extending class does not exist) revealing path */
if ( !defined( '\IPS\SUITE_UNIQUE_KEY' ) )
{
header( ( isset( $_SERVER['SERVER_PROTOCOL'] ) ? $_SERVER['SERVER_PROTOCOL'] : 'HTTP/1.0' ) . ' 403 Forbidden' );
exit;
}
/**
* menu
*/
class _menu extends \IPS\Node\Controller
{
/**
* Node Class
*/
protected $nodeClass = '\IPS\storm\Menu';
/**
* Execute
*
* @return void
*/
public function execute()
{
\IPS\Dispatcher::i()->checkAcpPermission( 'menu_manage' );
parent::execute();
}
protected function foo(){
$sql = \IPS\Db::i()->select( '*', 'storm_menu', null, 'menu_order asc' );
$menus = new \IPS\Patterns\ActiveRecordIterator( $sql, 'IPS\storm\Menu');
$store = [];
foreach( $menus as $menu ){
$store[ $menu->parent ][] = $menu->foo();
}
print_r( json_encode( $store ) );exit;
}
protected function reorder()
{
$parent = parent::reorder();
if (\IPS\Request::i()->isAjax()) {
\IPS\storm\Menu::kerching();
}
return $parent;
}
}<file_sep>/hooks/Log.php
//<?php
/* To prevent PHP errors (extending class does not exist) revealing path */
if ( !defined( '\IPS\SUITE_UNIQUE_KEY' ) ) {
exit;
}
class storm_hook_Log extends _HOOK_CLASS_
{
public static function log( $message, $category = null )
{
if ( ( defined( 'CJ_STORM_PROFILER' ) and CJ_STORM_PROFILER ) or ( defined( 'CJ_STORM_PROFILER_SAFE_MODE' ) and CJ_STORM_PROFILER_SAFE_MODE and \IPS\storm\Profiler::profilePassCheck() ) ) {
\IPS\storm\Profiler::i()->log( $message, $category );
}
return parent::log( $message, $category );
}
}
<file_sep>/hooks/AbstractData.php
//<?php
/* To prevent PHP errors (extending class does not exist) revealing path */
if ( !defined( '\IPS\SUITE_UNIQUE_KEY' ) ) {
exit;
}
abstract class storm_hook_AbstractData extends _HOOK_CLASS_
{
protected $closedKeys = [
'storm_bt',
'storm_cache',
];
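    // storm_bt and storm_cache back the profiler's own backtrace/cache views (see
    // modules/front/general), so they are presumably excluded from the cacheLog() calls
    // below to keep the profiler from logging its own storage activity.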
public function __get( $key )
{
if ( ( ( defined( 'CJ_STORM_PROFILER' ) and CJ_STORM_PROFILER ) or ( defined( 'CJ_STORM_PROFILER_SAFE_MODE' ) and CJ_STORM_PROFILER_SAFE_MODE and \IPS\storm\Profiler::profilePassCheck() ) ) ) {
if ( !isset( $this->_data[ $key ] ) ) {
if ( $this->exists( $key ) ) {
$cache = $this->get( $key );
$value = json_decode( $cache, true );
if ( !in_array( $key, $this->closedKeys ) ) {
\IPS\storm\Profiler::i()->cacheLog( [
'type' => 'get',
'key' => $key,
// 'cache' => var_export($cache, TRUE),
'backtrace' => var_export( debug_backtrace( DEBUG_BACKTRACE_IGNORE_ARGS ), true ),
] );
}
$this->_data[ $key ] = $value;
}
else {
throw new \OutOfRangeException;
}
}
return $this->_data[ $key ];
}
return parent::__get( $key );
}
public function __set( $key, $value )
{
if ( ( ( defined( 'CJ_STORM_PROFILER' ) and CJ_STORM_PROFILER ) or ( defined( 'CJ_STORM_PROFILER_SAFE_MODE' ) and CJ_STORM_PROFILER_SAFE_MODE and \IPS\storm\Profiler::profilePassCheck() ) ) ) {
if ( !in_array( $key, $this->closedKeys ) ) {
\IPS\storm\Profiler::i()->cacheLog( [
'type' => 'set',
'key' => $key,
// 'cache' => var_export($value, TRUE),
'backtrace' => var_export( debug_backtrace( DEBUG_BACKTRACE_IGNORE_ARGS ), true ),
] );
}
}
parent::__set( $key, $value );
}
public function storeWithExpire( $key, $value, \IPS\DateTime $expire, $fallback = false )
{
if ( ( ( defined( 'CJ_STORM_PROFILER' ) and CJ_STORM_PROFILER ) or ( defined( 'CJ_STORM_PROFILER_SAFE_MODE' ) and CJ_STORM_PROFILER_SAFE_MODE and \IPS\storm\Profiler::profilePassCheck() ) ) ) {
if ( !in_array( $key, $this->closedKeys ) ) {
\IPS\storm\Profiler::i()->cacheLog( [
'type' => 'set',
'key' => $key,
// 'cache' => var_export($value, TRUE),
                    'backtrace' => var_export( debug_backtrace( DEBUG_BACKTRACE_IGNORE_ARGS ), true ),
] );
}
}
parent::storeWithExpire( $key, $value, $expire, $fallback );
}
}
<file_sep>/interface/logs/logs.php
<?php
require_once str_replace( 'applications/storm/interface/logs/logs.php', '',
str_replace( '\\', '/', __FILE__ ) ) . 'init.php';
\IPS\Session\Front::i();
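/*
 * Long-poll endpoint: repeatedly checks core_log for entries newer than the supplied ?time=
 * value and returns JSON { html, time, count } when found, or { error: 1, ... } once roughly
 * half of max_execution_time has elapsed.
 */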
$max = ( ini_get( 'max_execution_time' ) / 2 );
$time = (int)\IPS\Request::i()->time;
$alttime = time();
while( true )
{
$ct = ( time() - $alttime );
if( $ct >= $max )
{
\IPS\Output::i()->json( [ 'error' => 1, 'end' => $alttime, 'time' => time(), 'ct' => $ct ] );
}
$logs = '';
$query = \IPS\Db::i()->select( '*', "core_log", [ 'time >= ? ', $time ], 'id desc' );
if( count( $query ) )
{
$sql = new \IPS\Patterns\ActiveRecordIterator(
$query,
'IPS\Log'
);
$count = 0;
foreach( $sql as $log )
{
$msg = \IPS\storm\Profiler\Template::i()
->consoleContainer( 'Log',
\IPS\storm\Profiler\Template::i()->logObj( $log ), "IPS Log",
\IPS\storm\Profiler::i()->oddEven( $log->id ) );
$logs = $msg . "\n" . $logs;
$count++;
}
$logs = [
'html' => $logs,
'time' => time() + 1,
'count' => $count
];
\IPS\Output::i()->json( $logs );
}
else
{
sleep( 1 );
continue;
}
}
<file_sep>/modules/admin/configuration/plugins.php
<?php
/**
* @brief Plugins Class
* @author -storm_author-
* @copyright -storm_copyright-
* @package IPS Social Suite
* @subpackage Storm
* @since 1.0.8
* @version -storm_version-
*/
namespace IPS\storm\modules\admin\configuration;
/* To prevent PHP errors (extending class does not exist) revealing path */
if( !defined( '\IPS\SUITE_UNIQUE_KEY' ) )
{
header( ( isset( $_SERVER[ 'SERVER_PROTOCOL' ] ) ? $_SERVER[ 'SERVER_PROTOCOL' ] : 'HTTP/1.0' ) . ' 403 Forbidden' );
exit;
}
class _plugins extends \IPS\Dispatcher\Controller
{
public function execute()
{
\IPS\Dispatcher::i()->checkAcpPermission( 'plugins_manage' );
parent::execute();
}
protected function manage()
{
$el = [
[
'name' => "storm_plugin_upload",
'class' => "Upload",
"required" => true,
"options" => [
'allowedFileTypes' => [ 'xml' ],
'temporary' => true,
],
]
];
$form = \IPS\storm\Forms::i( $el );
if( $vals = $form->values() )
{
$xml = new \IPS\Xml\XMLReader;
$xml->open( $vals[ 'storm_plugin_upload' ] );
if( !@$xml->read() )
{
\IPS\Output::i()->error( 'xml_upload_invalid', '2C145/D', 403, '' );
}
try
{
\IPS\Db::i()->select( 'plugin_id', 'core_plugins', [
'plugin_name=? AND plugin_author=?',
$xml->getAttribute( 'name' ),
$xml->getAttribute( 'author' )
] )->first();
$tempFileStir = tempnam( \IPS\TEMP_DIRECTORY, 'IPSStorm' );
move_uploaded_file( $vals[ 'storm_plugin_upload' ], $tempFileStir );
\IPS\Output::i()->redirect( $this->url->setQueryString( [
'do' => "doDev",
'storm' => $tempFileStir
] ) );
}
catch( \UnderflowException $e )
{
$tempFile = tempnam( \IPS\TEMP_DIRECTORY, 'IPS' );
move_uploaded_file( $vals[ 'storm_plugin_upload' ], $tempFile );
$secondTemp = tempnam( \IPS\TEMP_DIRECTORY, "Storm" );
copy( $tempFile, $secondTemp );
$url = \IPS\Http\Url::internal( 'app=core&module=applications&controller=plugins&do=doInstall' )
->setQueryString( [
'file' => $tempFile,
'key' => md5_file( $tempFile ),
'storm' => $secondTemp
] );
if( isset( \IPS\Request::i()->id ) )
{
$url = $url->setQueryString( 'id', \IPS\Request::i()->id );
}
\IPS\Output::i()->redirect( $url );
}
}
\IPS\Output::i()->title = \IPS\Member::loggedIn()->language()->addToStack( 'storm_plugins_title' );
\IPS\Output::i()->output = $form;
}
protected function doDev()
{
\IPS\storm\Plugins::i()->finish( \IPS\Request::i()->storm );
}
}<file_sep>/sources/Headerdoc/Headerdoc.php
<?php
/**
* @brief Headerdoc Class
* @author -storm_author-
* @copyright -storm_copyright-
* @package IPS Social Suite
* @subpackage Storm
* @since 1.0.0
* @version -storm_version-
*/
namespace IPS\storm;
if( !defined( '\IPS\SUITE_UNIQUE_KEY' ) )
{
header( ( isset( $_SERVER[ 'SERVER_PROTOCOL' ] ) ? $_SERVER[ 'SERVER_PROTOCOL' ] : 'HTTP/1.0' ) . ' 403 Forbidden' );
exit;
}
class _Headerdoc extends \IPS\Patterns\Singleton
{
public static $instance = null;
public function addIndexHtml( \IPS\Application $app )
{
$continue = false;
foreach( $app->extensions( 'storm', 'Headerdoc' ) as $class )
{
if( method_exists( $class, 'indexEnabled' ) )
{
$continue = $class->indexEnabled();
}
}
if( !$continue )
{
return;
}
$dir = \IPS\ROOT_PATH . "/applications/" . $app->directory;
$exclude = [
'.git',
'.idea',
'dev'
];
$filter = function( $file, $key, $iterator ) use ( $exclude )
{
if( !\in_array( $file->getFilename(), $exclude ) )
{
return true;
}
return false;
};
$dirIterator = new \RecursiveDirectoryIterator(
$dir,
\RecursiveDirectoryIterator::SKIP_DOTS
);
$iterator = new \RecursiveIteratorIterator(
new \RecursiveCallbackFilterIterator( $dirIterator, $filter ),
\RecursiveIteratorIterator::SELF_FIRST
);
foreach( $iterator as $iter )
{
if( $iter->isDir() )
{
$path = $iter->getPathname();
if( !file_exists( $path . "/index.html" ) )
{
\file_put_contents( $path . "/index.html", '' );
}
}
}
}
public function process( \IPS\Application $app )
{
if( !$this->can( $app ) )
{
return;
}
$dir = \IPS\ROOT_PATH . "/applications/" . $app->directory;
$subpackage = \IPS\Member::loggedIn()->language()->get( "__app_{$app->directory}" );
$exclude = [
'hooks',
'dev',
'data',
'3rdparty',
'3rd_party',
'vendor',
'.git',
'.idea'
];
$since = $app->version;
foreach( $app->extensions( 'storm', 'Headerdoc' ) as $class )
{
if( method_exists( $class, 'headerDocExclude' ) )
{
$exclude = array_merge( $exclude, $class->headerDocExclude() );
}
$reflector = new \ReflectionMethod( $class, 'since' );
$isProto = ( $reflector->getDeclaringClass()->getName() !== get_class( $class ) );
if( $isProto )
{
$since = $class->since( $app );
}
}
$filter = function( $file, $key, $iterator ) use ( $exclude )
{
if( !\in_array( $file->getFilename(), $exclude ) )
{
return true;
}
return false;
};
$dirIterator = new \RecursiveDirectoryIterator(
$dir,
\RecursiveDirectoryIterator::SKIP_DOTS
);
$iterator = new \RecursiveIteratorIterator(
new \RecursiveCallbackFilterIterator( $dirIterator, $filter ),
\RecursiveIteratorIterator::SELF_FIRST
);
$iterator = new \RegexIterator( $iterator, '/^.+\.php$/i', \RecursiveRegexIterator::GET_MATCH );
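// Matches class/trait declarations (optionally prefixed with abstract/final/static) and captures the keyword, the name and, if present, the extended parent class.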
$regEx = '#(?:(?<!\w))(?:[^\w]|\s+)(?:(?:(?:abstract|final|static)\s+)*)(class|trait)\s+([-a-zA-Z0-9_]+)(?:\s+extends\s+([^\s]+))?#';
foreach( $iterator as $file )
{
try
{
$filePath = $file[ 0 ];
$line = \file_get_contents( $filePath );
preg_match( "#^.+?\s(?=namespace)#s", $line, $section );
if( isset( $section[ 0 ] ) )
{
preg_match( '#@since([^\n]+)?#', $section[ 0 ], $sinced );
}
else
{
$sinced = [];
}
if( !isset( $sinced[ 1 ] ) )
{
preg_match( "#^.+?\s(?=namespace)#s", $line, $section );
if( isset( $section[ 0 ] ) )
{
preg_match( '#@brief([^\n]+)?#', $section[ 0 ], $brief );
}
else
{
$brief = [];
}
if( !isset( $brief[ 1 ] ) )
{
$path = pathinfo( $filePath );
$type = $path[ 'dirname' ];
$type = str_replace( '\\', '/', $type );
$file = $path[ 'filename' ];
if( \mb_strpos( $filePath, "extensions" ) !== false )
{
$type = explode( '/', $type );
$extension = \IPS\storm\Settings::mbUcfirst( mb_strtolower( array_pop( $type ) ) );
$extApp = \IPS\storm\Settings::mbUcfirst( mb_strtolower( array_pop( $type ) ) );
$brief = $extApp . " " . $extension . " extension: " . \IPS\storm\Settings::mbUcfirst( $file );
}
else
{
$file = \IPS\storm\Settings::mbUcfirst( $file );
\preg_match(
$regEx,
$line,
$matches
);
if( isset( $matches[ 3 ] ) )
{
$brief = ( \mb_strpos( $matches[ 3 ], "Model" ) !== false ) ? $file . " Node" : $file . " Class";
}
else
{
$brief = $file;
$brief .= ( isset( $matches[ 1 ] ) ) ? " " . \IPS\storm\Settings::mbUcfirst( $matches[ 1 ] ) : " Class";
}
}
$brief = \trim( $brief );
}
else
{
$brief = str_replace( ' ', '', trim( $brief[ 1 ] ) );
}
$replacement = <<<EOF
/**
* @brief {$brief}
* @author -storm_author-
* @copyright -storm_copyright-
* @package IPS Social Suite
* @subpackage {$subpackage}
* @since {$since}
* @version -storm_version-
*/
EOF;
$line = \preg_replace( "#^.+?\s(?=namespace)#s", "<?php\n\n$replacement\n\n", $line );
\file_put_contents( $filePath, $line );
}
else
{
$write = false;
$line = preg_replace_callback( "#^.+?\s(?=namespace)#s", function( $m ) use ( &$write, $since )
{
$line = $m[ 0 ];
//since
preg_match( '#@since([^\n]+)?#', $line, $sinceTag );
if( isset( $sinceTag[ 1 ] ) and trim( $sinceTag[ 1 ] ) == '-storm_since_version-' )
{
$write = true;
$sinceReplacement = <<<EOF
@since {$since}
EOF;
$line = preg_replace( '#@since([^\n]+)?#', $sinceReplacement, $line );
}
//author
preg_match( '#@author([^\n]+)?#', $line, $auth );
if( isset( $auth[ 1 ] ) and trim( $auth[ 1 ] ) != '-storm_author-' )
{
$write = true;
$author = <<<EOF
@author -storm_author-
EOF;
$line = preg_replace( '#@author([^\n]+)?#', $author, $line );
}
//version
preg_match( '#@version([^\n]+)?#', $line, $ver );
if( isset( $ver[ 1 ] ) and trim( $ver[ 1 ] ) != '-storm_version-' )
{
$write = true;
$ver = <<<EOF
@version -storm_version-
EOF;
$line = preg_replace( '#@version([^\n]+)?#', $ver, $line );
}
//copyright
preg_match( '#@copyright([^\n]+)?#', $line, $cp );
if( isset( $cp[ 1 ] ) and trim( $cp[ 1 ] ) != '-storm_copyright-' )
{
$write = true;
$cpy = <<<EOF
@copyright -storm_copyright-
EOF;
$line = preg_replace( '#@copyright([^\n]+)?#', $cpy, $line );
}
return $line;
}, $line );
if( $write )
{
\file_put_contents( $filePath, $line );
}
}
}
catch( \Exception $e )
{
}
}
}
public function can( $app )
{
$continue = false;
foreach( $app->extensions( 'storm', 'Headerdoc' ) as $class )
{
if( method_exists( $class, 'headerDocEnabled' ) )
{
$continue = $class->headerDocEnabled();
}
}
return $continue;
}
}<file_sep>/modules/admin/configuration/proxyclass.php
<?php
/**
* @brief Proxyclass Class
* @author -storm_author-
* @copyright -storm_copyright-
* @package IPS Social Suite
* @subpackage Storm
* @since 1.0.9
* @version -storm_version-
*/
namespace IPS\storm\modules\admin\configuration;
/* To prevent PHP errors (extending class does not exist) revealing path */
if( !defined( '\IPS\SUITE_UNIQUE_KEY' ) )
{
header( ( isset( $_SERVER[ 'SERVER_PROTOCOL' ] ) ? $_SERVER[ 'SERVER_PROTOCOL' ] : 'HTTP/1.0' ) . ' 403 Forbidden' );
exit;
}
/**
* Proxyclass
*/
class _proxyclass extends \IPS\Dispatcher\Controller
{
/**
* Execute
*
* @return void
*/
public function execute()
{
\IPS\Dispatcher::i()->checkAcpPermission( 'Proxyclass_manage' );
parent::execute();
}
/**
* ...
*
* @return void
*/
protected function manage()
{
\IPS\Output::i()->title = \IPS\Member::loggedIn()->language()->addToStack( 'storm_proxyclass_title' );
\IPS\Output::i()->output = \IPS\Theme::i()
->getTemplate( 'proxyclass', 'storm', 'admin' )
->button( $this->url->setQueryString( [ 'do' => 'queue' ] ) );
}
// Create new methods with the same name as the 'do' parameter which should execute it
protected function queue()
{
\IPS\Output::i()->title = \IPS\Member::loggedIn()->language()->addToStack( 'storm_proxyclass_title' );
\IPS\Output::i()->output = new \IPS\Helpers\MultipleRedirect(
\IPS\Http\Url::internal( "app=storm&module=configuration&controller=proxyclass&do=queue" ),
function( $data )
{
if( !isset( $data[ 'total' ] ) )
{
$data = [];
$data[ 'total' ] = \IPS\storm\Proxyclass::i()->dirIterator();
$data[ 'current' ] = 0;
$data[ 'progress' ] = 0;
}
$run = \IPS\storm\Proxyclass::i()->run( $data );
if( $run == null )
{
return null;
}
else
{
$progress = isset( $run[ 'progress' ] ) ? $run[ 'progress' ] : 0;
if( $run[ 'total' ] and $run[ 'current' ] )
{
$progress = ( $run[ 'current' ] / $run[ 'total' ] ) * 100;
}
$language = \IPS\Member::loggedIn()->language()->addToStack( 'storm_proxyclass_progress', false,
[ 'sprintf' => [ $run[ 'current' ], $run[ 'total' ] ] ] );
return [
[
'total' => $run[ 'total' ],
'current' => $run[ 'current' ],
'progress' => $run[ 'progress' ]
],
$language,
$progress
];
}
},
function()
{
/* And redirect back to the overview screen */
\IPS\Output::i()
->redirect( \IPS\Http\Url::internal( 'app=storm&module=configuration&controller=proxyclass' ),
'storm_proxyclass_done' );
}
);
}
}<file_sep>/hooks/Plugin.php
//<?php
/* To prevent PHP errors (extending class does not exist) revealing path */
if ( !defined( '\IPS\SUITE_UNIQUE_KEY' ) ) {
exit;
}
class storm_hook_Plugin extends _HOOK_CLASS_
{
public static function addExceptionHandlingToHookFile( $file )
{
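/* Normalise whitespace so we can detect whether the parent-call exception wrapper is already present; if it is, return the file untouched instead of wrapping it again. */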
$hayStack = \file_get_contents( $file );
$haystack = \str_replace( [ "\n", "\r", "\r\n", "\t", " " ], '', $hayStack );
$needle = 'method_exists(get_parent_class(),__FUNCTION__)){returncall_user_func_array(\'parent::\'.__FUNCTION__,func_get_args())';
if ( \mb_strpos( $haystack, $needle ) !== false ) {
return $hayStack;
}
return parent::addExceptionHandlingToHookFile( $file );
}
public function save(){
parent::save();
\IPS\storm\Proxyclass::i()->generateSettings();
}
}
<file_sep>/sources/Classes/Classes.php
<?php
/**
* @brief Classes Class
* @author -storm_author-
* @copyright -storm_copyright-
* @package IPS Social Suite
* @subpackage Storm
* @since 1.0.3
* @version -storm_version-
*/
namespace IPS\storm;
if( !defined( '\IPS\SUITE_UNIQUE_KEY' ) )
{
header( ( isset( $_SERVER[ 'SERVER_PROTOCOL' ] ) ? $_SERVER[ 'SERVER_PROTOCOL' ] :
'HTTP/1.0' ) . ' 403 Forbidden' );
exit;
}
class _Classes extends \IPS\Patterns\Singleton
{
public static $instance = null;
protected $type = null;
protected $app = null;
protected $nameSpace = null;
protected $className = null;
protected $extends = '';
protected $implements = '';
protected $brief = null;
protected $application = null;
protected $blanks = null;
protected $formVersion = "1.0.6";
protected $prefix = '';
protected $database = '';
protected $nodeItemClass = '';
public function form()
{
$app = \IPS\Application::load( \IPS\Request::i()->appKey );
$form = \IPS\storm\Forms::i( $this->elements( $app ), null, 'classes' );
if( $vals = $form->values() )
{
$this->process( $vals, $app );
$msg = \IPS\Member::loggedIn()
->language()
->addToStack( 'storm_class_created', false,
[ 'sprintf' => [ $this->type, $this->className ] ] );
\IPS\Output::i()
->redirect( \IPS\Http\Url::internal( "app=core&module=applications&controller=developer&appKey={$app->directory}&tab=class" ),
$msg );
}
return $form;
}
protected function elements( \IPS\Application $app )
{
$class = function( $data ) use ( $app )
{
$class = \IPS\storm\Settings::mbUcfirst( \IPS\Request::i()->storm_class_namespace );
$data = \IPS\storm\Settings::mbUcfirst( $data );
if( $class )
{
$ns = "\\IPS\\" . $app->directory . "\\" . $class . "\\" . $data;
}
else
{
$ns = "\\IPS\\" . $app->directory . "\\" . $data;
}
if( $data != "Forms" )
{
if( class_exists( $ns ) )
{
throw new \InvalidArgumentException( 'storm_classes_class_no_exist' );
}
}
};
$extends = function( $data )
{
if( $data and !class_exists( $data ) )
{
throw new \InvalidArgumentException( 'storm_classes_extended_class_no_exist' );
}
};
$implements = function( $data )
{
if( is_array( $data ) and count( $data ) )
{
foreach( $data as $implement )
{
if( !class_exists( $implement ) )
{
throw new \InvalidArgumentException( 'storm_classes_implemented_no_interface' );
}
}
}
};
$validate = function( $data )
{
if( $data == "select" )
{
throw new \InvalidArgumentException( 'storm_classes_type_no_selection' );
}
};
$classType = [
'select' => "Select Type",
'normal' => "Class",
'singleton' => "Singleton",
'ar' => "Active Record",
'model' => "Node",
'item' => "Content Item",
'comment' => "Content Item Comment",
'forms' => "Forms Class",
];
$itemNodeValidation = function( $data ) use ( $app )
{
if( $data )
{
$class = "IPS\\{$app->directory}\\{$data}";
if( !class_exists( $class ) )
{
throw new \InvalidArgumentException( 'storm_class_node_item_missing' );
}
}
};
$toggles = [
'normal' => [
'namespace',
'className',
'extends',
'implements',
],
'singleton' => [
'namespace',
'className',
'implements',
],
'ar' => [
'namespace',
'className',
'database',
'prefix'
],
'model' => [
'namespace',
'className',
'implements',
'database',
'prefix'
],
'item' => [
'namespace',
'className',
'implements',
'item_node_class',
'database',
'prefix'
],
'comment' => [
'namespace',
'className',
'implements',
'item_node_class',
'database',
'prefix'
],
];
$el = [
[
'class' => "Select",
'name' => "type",
'default' => "select",
'required' => true,
'validation' => $validate,
'options' => [
'options' => $classType,
'toggles' => $toggles,
],
],
[
'class' => "Text",
'name' => 'namespace',
'options' => [
'placeholder' => "Namespace",
],
'prefix' => "IPS\\{$app->directory}\\",
],
[
'class' => "Text",
'name' => "className",
'required' => true,
'options' => [
'placeholder' => 'Class Name',
],
'validation' => $class,
],
[
'class' => "Text",
'name' => "database",
'appearRequired' => true,
'prefix' => $app->directory . '_',
'validation' => function( $data )
{
if( !$data )
{
throw new \InvalidArgumentException( 'storm_classes_no_blank' );
}
}
],
[
'class' => "Text",
'name' => "prefix",
'suffix' => "_"
],
[
'class' => "Text",
'name' => "item_node_class",
'appearRequired' => true,
'prefix' => "IPS\\{$app->directory}\\",
'validation' => $itemNodeValidation
],
[
'class' => "Text",
'name' => "extends",
'validation' => $extends,
],
[
'class' => "Stack",
'name' => "implements",
'validation' => $implements,
],
];
$el[ 'prefix' ] = 'storm_class_';
return $el;
}
public function process( $data, $app )
{
$this->blanks = \IPS\ROOT_PATH . "/applications/storm/sources/Classes/blanks/";
$this->type = $data[ 'storm_class_type' ];
$this->application = $app;
$this->app = \IPS\storm\Settings::mbUcfirst( $app->directory );
if( isset( $data[ 'storm_class_prefix' ] ) )
{
$this->prefix = $data[ 'storm_class_prefix' ] . "_";
}
if( isset( $data[ 'storm_class_database' ] ) )
{
$this->database = $app->directory.'_'.$data[ 'storm_class_database' ];
}
if( isset( $data[ 'storm_class_namespace' ] ) and $data[ 'storm_class_namespace' ] )
{
$this->nameSpace = 'IPS\\' . $app->directory . '\\' . $data[ 'storm_class_namespace' ];
}
else
{
$this->nameSpace = 'IPS\\' . $app->directory;
}
if( isset( $data[ 'storm_class_item_node_class' ] ) and $data[ 'storm_class_item_node_class' ] )
{
$nic = $data[ 'storm_class_item_node_class' ];
$this->nodeItemClass = "IPS\\{$app->directory}\\{$nic}";
}
if( isset( $data[ 'storm_class_className' ] ) )
{
$this->className = \IPS\storm\Settings::mbUcfirst( $data[ 'storm_class_className' ] );
}
else
{
$this->className = "Forms";
}
$ns = $this->nameSpace . "\\" . $this->className;
if( $this->className != "Forms" )
{
if( class_exists( $ns ) )
{
return;
}
}
$this->brief = \IPS\storm\Settings::mbUcfirst( $app->directory );
if( isset( $data[ 'storm_class_extends' ] ) and $data[ 'storm_class_extends' ] )
{
$this->extends = "extends " . $data[ 'storm_class_extends' ];
}
if( isset( $data[ 'storm_class_implements' ] ) and is_array( $data[ 'storm_class_implements' ] ) and count( $data[ 'storm_class_implements' ] ) )
{
//loop through to append newlines and drop duplicate interfaces before joining the list
$new = [];
foreach( $data[ 'storm_class_implements' ] as $imp )
{
$new[ $imp ] = $imp . "\n";
}
$this->implements = "implements " . rtrim( implode( ',', $new ) );
}
$template = $this->{$this->type}();
$dir = \IPS\ROOT_PATH . '/applications/' . $app->directory . '/sources/' . $this->getDir();
$file = $this->className . ".php";
$toWrite = $dir . '/' . $file;
if( !file_exists( $dir ) )
{
\mkdir( $dir, 0777, true );
}
\file_put_contents( $toWrite, $template );
\chmod( $toWrite, 0777 );
\IPS\storm\Proxyclass::i()->build( $toWrite );
}
protected function getDir()
{
$ns = explode( '\\', $this->nameSpace );
$ns = array_pop( $ns );
if( $ns == $this->application->directory )
{
return $this->className;
}
else
{
if( $ns != $this->className )
{
return $ns;
}
}
return $this->className;
}
protected function comment()
{
$path = $this->blanks . "comment.txt";
return $this->build( $path, 'Content Comment Class' );
}
protected function build( $path, $brief )
{
$content = \file_get_contents( $path );
return $this->replacementValues( $content, $brief );
}
protected function replacementValues( $content, $brief = "Class" )
{
$find = [
'#header#',
'#brief#',
'#app#',
'#applications#',
'#namespace#',
'#classname#',
'#extends#',
'#implements#',
'#permtype#',
'#database#',
'#prefix#',
'#nodeItemClass#',
'#module#'
];
$replacements = [
$this->buildHeader( $brief ),
$brief,
$this->app,
$this->application->directory,
$this->nameSpace,
$this->className,
$this->extends,
$this->implements,
\mb_strtolower( $this->className ),
$this->database,
$this->prefix,
$this->nodeItemClass,
\mb_strtolower($this->className )
];
return str_replace( $find, $replacements, $content );
}
protected function buildHeader( $brief )
{
$path = $this->blanks . "header.txt";
$content = \file_get_contents( $path );
if( $brief == "Forms Class" )
{
$content = str_replace( "*/", "* forms version {$this->formVersion}\n*/", $content );
}
return $content;
}
protected function item()
{
$path = $this->blanks . "item.txt";
return $this->build( $path, 'Content Item Class' );
}
protected function model()
{
$path = $this->blanks . "model.txt";
return $this->build( $path, 'Node' );
}
protected function ar()
{
$path = $this->blanks . "ar.txt";
return $this->build( $path, 'Active Record' );
}
protected function normal()
{
$path = $this->blanks . "normal.txt";
return $this->build( $path, 'Class' );
}
protected function forms()
{
$path = $this->blanks . "forms.txt";
return $this->build( $path, 'Forms Class' );
}
protected function singleton()
{
$path = $this->blanks . "singleton.txt";
return $this->build( $path, 'Singleton' );
}
}
<file_sep>/interface/css/css.php
<?php
require_once '../../../../init.php';
if( \IPS\IN_DEV !== true AND !\IPS\Theme::designersModeEnabled() )
{
exit();
}
try
{
/* The CSS is parsed by the theme engine, and the theme engine has plugins, and those plugins need to know which theme ID we're using */
if( \IPS\Theme::designersModeEnabled() )
{
\IPS\Session\Front::i();
}
$needsParsing = false;
$cs = [];
foreach( \IPS\Data\Store::i()->dev_css as $key => $val )
{
if( is_array( $val ) )
{
foreach( $val as $k => $v )
{
$cs[] = $v;
}
}
else
{
$cs[] = $val;
}
}
$cssF = implode( ',', $cs );
if( strstr( $cssF, ',' ) )
{
$contents = '';
foreach( explode( ',', $cssF ) as $css )
{
if( mb_substr( $css, -4 ) !== '.css' )
{
continue;
}
$names[] = $css;
$css = str_replace( \IPS\ROOT_PATH, '', $css );
$css = str_replace( '../', '', $css ); /* strip directory traversal sequences */
$file = file_get_contents( \IPS\ROOT_PATH . '/' . $css );
$params = processFile( $file );
if( $params[ 'hidden' ] === 1 )
{
continue;
}
$contents .= "\n" . $file;
if( needsParsing( $css ) )
{
$needsParsing = true;
}
}
}
else
{
if( mb_substr( $cssF, -4 ) !== '.css' )
{
exit();
}
$names[] = $cssF;
$contents = file_get_contents( \IPS\ROOT_PATH . '/' . str_replace( '../', '', $cssF ) );
$params = processFile( $contents );
if( $params[ 'hidden' ] === 1 )
{
exit;
}
if( needsParsing( $cssF ) )
{
$needsParsing = true;
}
}
if( $needsParsing )
{
if( \IPS\Theme::designersModeEnabled() )
{
/* If we're in designer's mode, we need to reset the theme ID based on the CSS path as we could be in the ACP which may have a different theme ID set */
preg_match( '#themes/(\d+)/css/(.+?)/(.+?)/(.*)\.css#', $cssF, $matches );
if( $matches[ 1 ] and $matches[ 1 ] !== \IPS\Theme::$memberTheme->id )
{
try
{
\IPS\Theme::$memberTheme = \IPS\Theme\Advanced\Theme::load( $matches[ 1 ] );
}
catch( \OutOfRangeException $ex )
{
}
}
}
$names = str_replace( [ '/', '.css', '-' ], '', implode( '', $names ) );
$functionName = 'css_' . md5( $names );
$contents = str_replace( '\\', '\\\\', $contents );
\IPS\Theme::makeProcessFunction( $contents, $functionName );
$functionName = "IPS\\Theme\\{$functionName}";
\IPS\Output::i()->sendOutput( $functionName(), 200, 'text/css' );
}
else
{
\IPS\Output::i()->sendOutput( $contents, 200, 'text/css' );
}
}
catch( \Exception $e )
{
\IPS\storm\Debug::log( $e );
}
/**
* Determine whether this file needs parsing or not
*
* @return boolean
*/
function needsParsing( $fileName )
{
if( \IPS\IN_DEV === true AND !\IPS\Theme::designersModeEnabled() )
{
preg_match( '#applications/(.+?)/dev/css/(.+?)/(.*)\.css#', $fileName, $matches );
}
else
{
preg_match( '#themes/(?:\d+)/css/(.+?)/(.+?)/(.*)\.css#', $fileName, $matches );
}
return count( $matches );
}
/**
* Process the file to extract the header tag params
*
* @return array
*/
function processFile( $contents )
{
$return = [ 'module' => '', 'app' => '', 'pos' => '', 'hidden' => 0 ];
/* Parse the header tag */
preg_match_all( '#^/\*<ips:css([^>]+?)>\*/\n#', $contents, $params, PREG_SET_ORDER );
foreach( $params as $id => $param )
{
preg_match_all( '#([\d\w]+?)=\"([^"]+?)"#i', $param[ 1 ], $items, PREG_SET_ORDER );
foreach( $items as $id => $attr )
{
switch( trim( $attr[ 1 ] ) )
{
case 'module':
$return[ 'module' ] = trim( $attr[ 2 ] );
break;
case 'app':
$return[ 'app' ] = trim( $attr[ 2 ] );
break;
case 'position':
$return[ 'pos' ] = intval( $attr[ 2 ] );
break;
case 'hidden':
$return[ 'hidden' ] = intval( $attr[ 2 ] );
break;
}
}
}
return $return;
}<file_sep>/sources/Generator/Generator.php
<?php
/**
* @brief Generator Active Record
* @author <a href='http://codingjungle.com'><NAME></a>
* @copyright (c) 2017 <NAME>
* @package IPS Social Suite
* @subpackage Storm
* @since -storm_since_version-
* @version 3.0.4
*/
namespace IPS\storm;
if( !defined( '\IPS\SUITE_UNIQUE_KEY' ) )
{
header( ( isset( $_SERVER[ 'SERVER_PROTOCOL' ] ) ? $_SERVER[ 'SERVER_PROTOCOL' ] :
'HTTP/1.0' ) . ' 403 Forbidden' );
exit;
}
class _Generator extends \IPS\Patterns\ActiveRecord
{
/**
* @brief [ActiveRecord] Database Prefix
*/
public static $databasePrefix = 'generator_';
/**
* @brief [ActiveRecord] ID Database Column
*/
public static $databaseColumnId = 'id';
/**
* @brief [ActiveRecord] Database table
*/
public static $databaseTable = 'storm_generator';
/**
* @brief [ActiveRecord] Database ID Fields
* @note If using this, declare a static $multitonMap = array(); in the child class to prevent duplicate loading queries
*/
protected static $databaseIdFields = [ 'id' ];
/**
* @brief Bitwise keys
*/
protected static $bitOptions = [];
/**
* @brief [ActiveRecord] Multiton Store
*/
protected static $multitons;
public static function create( $type, $id )
{
$d = new static;
$d->type = $type;
$d->gid = $id;
$d->save();
}
public function process()
{
try
{
switch( $this->type )
{
case "members":
$d = \IPS\Member::load( $this->gid );
$d->delete();
break;
case "forums":
$d = \IPS\forums\Forum::load( $this->gid );
$d->delete();
break;
case "topics":
$d = \IPS\forums\Topic::load( $this->gid );
$d->delete();
break;
case "posts":
$d = \IPS\forums\Topic\Post::load( $this->gid );
$d->delete();
break;
}
}
catch( \Exception $e )
{
}
$this->delete();
}
} | 85f2c1c97f66b57a5e10a5a844618c2ac77456a6 | [
"JavaScript",
"PHP"
] | 52 | PHP | MartinAronsen/storm | b820e56b1823e0ea7be900b206766db7700be4e0 | 021a35c729e07234f2e803d17b7bd94b9880f7a8 | |
refs/heads/master | <file_sep>//引入events模块
var events = require("events");
//创建事件处理程序
var eventEmitter = new events.EventEmitter();
//创建事件处理程序
var connectHandler = function connected() {
console.log("链接成功");
//触发data_received 事件
eventEmitter.emit("data_receive")
}
//绑定connection事件
eventEmitter.on('connection',connectHandler);
//使用匿名函数绑定data_receive事件
eventEmitter.on('data_receive',function(){
console.log('数据接收成功。')
})
//触发connection事件
eventEmitter.emit('connection');
console.log("事件执行完毕。")<file_sep># node笔记
(阅读地址:[node教程](http://fis.baidu.com/fis3/docs/beginning/intro.html))
# 应用创建
## 引入模块
require 指令来载入 http 模块,并将实例化的 HTTP 赋值给变量 http,实例如下:
```
var http = require("http");
```
## 创建服务器
使用 http.createServer() 方法创建服务器,并使用 listen 方法绑定 8888 端口。 函数通过 request, response 参数来接收和响应数据。
实例如下,在你项目的根目录下创建一个叫 server.js 的文件,并写入以下代码:
```
var http = require('http');
http.createServer(function (request, response) {
// 发送 HTTP 头部
// HTTP 状态值: 200 : OK
// 内容类型: text/plain
response.writeHead(200, {'Content-Type': 'text/plain'});
// 发送响应数据 "Hello World"
response.end('Hello World\n');
}).listen(8888);
// 终端打印如下信息
console.log('Server running at http://127.0.0.1:8888/');
```
打开浏览器8888端口将看到返回的数据
# npm使用介绍
NPM是随同NodeJS一起安装的包管理工具,能解决NodeJS代码部署上的很多问题,常见的使用场景有以下几种:
允许用户从NPM服务器下载别人编写的第三方包到本地使用。
允许用户从NPM服务器下载并安装别人编写的命令行程序到本地使用。
允许用户将自己编写的包或命令行程序上传到NPM服务器供别人使用。
## 基础使用说明
由于新版的nodejs已经集成了npm,所以之前npm也一并安装好了。同样可以通过输入 "npm -v" 来测试是否成功安装。命令如下,出现版本提示表示安装成功。
如果你安装的是旧版本的 npm,可以很容易得通过 npm 命令来升级,命令如下:
```
npm install npm -g
```
查看全局安装的命令:
```
$ npm ls -g
```
## Package.json 属性说明
name - 包名。
version - 包的版本号。
description - 包的描述。
homepage - 包的官网 url 。
author - 包的作者姓名。
contributors - 包的其他贡献者姓名。
dependencies - 依赖包列表。如果依赖包没有安装,npm 会自动将依赖包安装在 node_module 目录下。
repository - 包代码存放的地方的类型,可以是 git 或 svn,git 可在 Github 上。
main - main 字段是一个模块ID,它是一个指向你程序的主要项目。就是说,如果你包的名字叫 express,然后用户安装它,然后require("express")。
keywords - 关键字
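A minimal package.json using several of these fields might look like the following sketch (all values here are placeholders, not a real package):
```
{
  "name": "my-module",
  "version": "1.0.0",
  "description": "A small demo module",
  "main": "index.js",
  "keywords": ["demo"],
  "author": "someone",
  "dependencies": {
    "express": "^4.13.0"
  }
}
```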
## 卸载模块
```
$ npm uninstall express
```
## 搜索模块
```
$ npm search express
```
# Node.js 回调函数
Node.js 异步编程的直接体现就是回调。
异步编程依托于回调来实现,但不能说使用了回调后程序就异步化了。
回调函数在完成任务后就会被调用,Node 使用了大量的回调函数,Node 所有 API 都支持回调函数。
例如,我们可以一边读取文件,一边执行其他命令,在文件读取完成后,我们将文件内容作为回调函数的参数返回。这样在执行代码时就没有阻塞或等待文件 I/O 操作。这就大大提高了 Node.js 的性能,可以处理大量的并发请求。
## 阻塞代码实例
创建一个文件 input.txt
创建 main.js 文件, 代码如下:
```
var fs = require("fs");
var data = fs.readFileSync('input.txt');
console.log(data.toString());
console.log("程序执行结束!");
```
## 非阻塞代码实例
```
var fs = require("fs");
fs.readFile('input.txt', function (err, data) {
if (err) return console.error(err);
console.log(data.toString());
});
console.log("程序执行结束!");
```
# 事件循环
Node.js 是单进程单线程应用程序,但是通过事件和回调支持并发,所以性能非常高。
Node.js 的每一个 API 都是异步的,并作为一个独立线程运行,使用异步函数调用,并处理并发。
Node.js 基本上所有的事件机制都是用设计模式中观察者模式实现。
Node.js 单线程类似进入一个while(true)的事件循环,直到没有事件观察者退出,每个异步事件都生成一个事件观察者,如果有事件发生就调用该回调函数.
[什么是进程,什么事线程](http://www.ruanyifeng.com/blog/2013/04/processes_and_threads.html)
## 事件驱动程序

理解:事件产生器发送事件,事件循环器往事件处理器里面加事件,事件形成队列,当某个事件执行完的时候,就触发某个事件对于的回调函数。
node.js 有多个内置模块,事件模块是其中之一,可以通过引入events模块,并通过实例化eventemitter(事件产生器)类来绑定和监听事件实例如下:
```
//引入events模块
var events = require('events');
//创建事件发射器
var eventEmitter = new events.EventEmitter();
```
绑定事件处理程序
```
eventEmitter.on("eventName",enentHandler)
```
触发事件
```
eventEmitter.emit("eventName");
```
## 实例
创建mian.js文件
```
//引入events模块
var events = require("events");
//创建事件处理程序
var eventEmitter = new events.EventEmitter();
//创建事件处理程序
var connectHandler = function connected() {
console.log("链接成功");
//触发data_received 事件
eventEmitter.emit("data_receive")
}
//绑定connection事件
eventEmitter.on('connection',connectHandler);
//使用匿名函数绑定data_receive事件
eventEmitter.on('data_receive',function(){
console.log('数据接收成功。')
})
//触发connection事件
eventEmitter.emit('connection');
console.log("事件执行完毕。")
```
[main.js文件地址](main.js)
## node应用程序是如何工作的
在 Node 应用程序中,执行异步操作的函数将回调函数作为最后一个参数, 回调函数接收错误对象作为第一个参数。
接下来让我们来重新看下前面的实例,创建一个 input.txt ,文件内容如下:
```
晓军博客地址:www.wangxiaojun.top
```
创建read.js代码如下:
```
var fs = require("fs");
fs.readFile('input.txt', function (err, data) {
    if (err) return console.error(err);
    console.log(data.toString());
});
console.log("程序执行完毕!");
```
[read.js文件地址](read.js)
以上程序中 fs.readFile() 是异步函数用于读取文件。 如果在读取文件过程中发生错误,错误 err 对象就会输出错误信息。
如果没发生错误,readFile 跳过 err 对象的输出,文件内容就通过回调函数输出。
执行以上代码,执行结果如下:
```
程序执行完毕!
晓军博客地址:www.wangxiaojun.top
```
接下来我们删除 input.txt 文件,执行结果如下所示:
```
程序执行完毕
Error: ENOENT, open 'input.txt'
```
# EventEmitter
ndoe.js所有的异步i/o操作在完成时都会发送一个事件到事件队列。
node.js里面的许多对象都会分发事件:一个net.server对象会在每次有新连接时分发一个事件, 一个fs.readStream对象会在文件被打开的时候发出一个事件。 所有这些产生事件的对象都是 events.EventEmitter 的实例。
理解:就像一个女人生了一个机器人,在某个指令下达的时候,他从事一些事情,等等,等同于java中的一个类;注意,他没有自由的权利,得手动触发。
## EventEmitter 类
EventEmitter 对象如果在实例化时发生错误,会触发 'error' 事件。当添加新的监听器时,'newListener' 事件会触发,当监听器被移除时,'removeListener' 事件被触发。
下面我们用一个简单的例子说明 EventEmitter 的用法:
```
var EventEmitter = require('events').EventEmitter;
var event=new EventEmitter();
event.on("some_event",function(){
console.log('some_event事件触发')
});
setTimeout(function(){
event.emit("some_event")
},1000)
```
[event.js文件地址](event.js)
# Node.js Buffer
JavaScript 语言自身只有字符串数据类型,没有二进制数据类型。
但在处理像TCP流或文件流时,必须使用到二进制数据。因此在 Node.js中,定义了一个 Buffer 类,该类用来创建一个专门存放二进制数据的缓存区。
在 Node.js 中,Buffer 类是随 Node 内核一起发布的核心库。Buffer 库为 Node.js 带来了一种存储原始数据的方法,可以让 Node.js 处理二进制数据,每当需要在 Node.js 中处理I/O操作中移动的数据时,就有可能使用 Buffer 库。原始数据存储在 Buffer 类的实例中。一个 Buffer 类似于一个整数数组,但它对应于 V8 堆内存之外的一块原始内存。
## 创建buffer类
### 方法一
创建10字节的Buffer实例
```
var buf=new Buffer(10)
```
### 方法二
给给定的数组创建buffer实例
```
var buf=new Buffer([10, 20, 30, 40, 50])
```
### 方法三
通过字符串来创建Buffer
```
var buf=new Buffer("www.wangxiaojun.top")
```
utf-8 是默认的编码方式,此外它同样支持以下编码:"ascii", "utf8", "utf16le", "ucs2", "base64" 和 "hex"。
## 写入缓冲区
### 语法
```
buf.write(string,index,length,encoding)
```
### 参数说明
1.string 要写入的字符串
2.index 索引,默认为0
3.length 默认为写入字符串的长度
4.编码 默认为utf-8
### 返回值
返回实际写入的大小,如果 buffer 空间不足,只会写入部分字符串。
实例 buf.js
```
var buf=new Buffer(256);
var len=buf.write("www.wangxiaojun.top");
console.log("写入字节数:"+len)
```
[查看实例](buf.js)
## 从缓冲区读取数据
### 语法
```
buf.toString(encoding,start,end)
```
### 参数说明
1.encoding 编码 默认utf-8
2.start 开始读取位置 默认开始读取的索引位置
3.end 结束读取位置 默认结束读取的索引位置
### 实例
```
var buf=new Buffer(26);
for(var i=0;i<26;i++){
buf[i]=i+97
};
console.log(buf.toString("ascii"));
console.log(buf.toString("ascii",0,5));
console.log(buf.toString("utf-8",0,5));
console.log(buf.toString(undefined,0,5));
```
[实例查看](readbuf.js)
## 将buffer转为JSON对象
### 语法
```
buf.toJSON();
```
### 实例
```
var buf=new Buffer("www.wangxiaojun.top");
var json = buf.toJSON();
console.log(json)
```
[实例查看](jsonbuf)
# Node.js Stream
## Node.js Stream(流)
Stream 是一个抽象接口,Node 中有很多对象实现了这个接口。例如,对http 服务器发起请求的request 对象就是一个 Stream,还有stdout(标准输出)。(小水管~哈哈)
Node.js,Stream 有四种流类型:
Readable - 可读操作。
Writable - 可写操作。
Duplex - 可读可写操作.
Transform - 操作被写入数据,然后读出结果。
所有的 Stream 对象都是 EventEmitter 的实例。常用的事件有:
data - 当有数据可读时触发。
end - 没有更多的数据可读时触发。
error - 在接收和写入过程中发生错误时触发。
finish - 所有数据已被写入到底层系统时触发。
## 从流中读取数据
创建stream.js文件,代码如下:
```
var fs=require("fs");
var data='';
//创建可读流
var readStream = fs.createReadStream("input.txt");
//设置编码为utf-8
readStream.setEncoding("UTF8");
//处理流事件
readStream.on("data",function(chunk){
data+=chunk;
});
readStream.on("end",function(){
console.log(data)
});
readStream.on('error',function(err){
console.log(err)
});
console.log("程序执行完毕")
```
[文件预览](stream.js)
## 写入流
创建instream文件,代码如下:
```
var fs=require("fs");
var data="晓军博客地址:www.wangxiaojun.top";
//创建一个可写入的流到output.txt中
var writeStream = fs.createWriteStream("output.txt");
//使用utf8写入数据
writeStream.write(data,"UTF8");
//标记文件末尾
writeStream.end();
//处理事件流
writeStream.on("finish",function(){
console.log("写入完成")
})
writeStream.on("error",function(err){
console.log(err)
})
console.log("程序执行完毕")
```
[instream预览](instream.js)
## 管道流
提供一个数据从一个载体到另一个载体的管道。

如上面的图片所示,我们把文件比作装水的桶,而水就是文件里的内容,我们用一根管子(pipe)连接两个桶使得水从一个桶流入另一个桶,这样就慢慢的实现了大文件的复制过程。
以下实例我们通过读取一个文件内容并将内容写入到另外一个文件中。
```
var fs=require("fs");
//创建一个可读流
var readStream=fs.createReadStream("input.txt");
//创建一个可写流
var writeStream=fs.createWriteStream("pipe.txt");
//管道操作读写 将input.txt写入到pite.txt
readStream.pipe(writeStream);
console.log("程序执行完毕!")
```
[pipe.js预览](pipe.js)
## 链式流
链式是通过连接输出流到另外一个流并创建多个对个流操作链的机制。链式流一般用于管道操作。
接下来我们就是用管道和链式来压缩和解压文件。
创建 compress.js 文件, 代码如下:
```
var fs=require("fs");
var zlib=require("zlib");
//压缩input.txt文件为 input.txt.gz
fs.createReadStream("input.txt")
.pipe(zlib.createGzip())
.pipe(fs.createWriteStream("input.txt.gz"));
console.log("程序执行完毕!")
```
# nodejs模块系统
为了让Node.js的文件可以相互调用,Node.js提供了一个简单的模块系统。
模块是Node.js 应用程序的基本组成部分,文件和模块是一一对应的。换言之,一个 Node.js 文件就是一个模块,这个文件可能是JavaScript 代码、JSON 或者编译过的C/C++ 扩展。
注解:有独立功能的一个块。
## 创建模块
例如:
```
// hello.js
function hello (){
var name;
this.setName=function(nName){
name=nName;
};
this.sayHello=function(){
console.log("hello "+name)
}
};
module.exports=hello;
```
[hello.js](hello.js)
## 调用模块
例如:
```
// module.js
var Hello=require("./hello");
var hello=new Hello();
hello.setName("wangxiaojun");
hello.sayHello();
```
[module.js](module.js)
## 服务端的模块放在哪里
Node.js中自带了一个叫做"http"的模块,我们在我们的代码中请求它并把返回值赋给一个本地变量。
这把我们的本地变量变成了一个拥有所有 http 模块所提供的公共方法的对象。
Node.js 的 require方法中的文件查找策略如下:
由于Node.js中存在4类模块(原生模块和3种文件模块),尽管require方法极其简单,但是内部的加载却是十分复杂的,其加载优先级也各自不同。如下图所示:

三种文件模块:.js。通过fs模块同步读取js文件并编译执行。
.node。通过C/C++进行编写的Addon。通过dlopen方法进行加载。
.json。读取文件,调用JSON.parse解析加载。
## 从文件模块缓存中加载
缓存中的模块,优先级高于原生模块和文件模块
## 从原生模块加载
原生模块的优先级仅次于文件模块缓存的优先级。require方法在解析文件名之后,优先检查模块是否在原生模块列表中。以http模块为例,尽管在目录下存在一个http/http.js/http.node/http.json文件,require("http")都不会从这些文件中加载,而是从原生模块中加载。
原生模块也有一个缓存区,同样也是优先从缓存区加载。如果缓存区没有被加载过,则调用原生模块的加载方式进行加载和执行。
## 从文件加载
当文件模块缓存中不存在,而且不是原生模块的时候,Node.js会解析require方法传入的参数,并从文件系统中加载实际的文件,加载过程中的包装和编译细节在前一节中已经介绍过,这里我们将详细描述查找文件模块的过程,其中,也有一些细节值得知晓。
require方法接受以下几种参数的传递:
http、fs、path等,原生模块。
./mod或../mod,相对路径的文件模块。
/pathtomodule/mod,绝对路径的文件模块。
mod,非原生模块的文件模块。
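As a quick sketch, these four styles of require calls look like the following (the relative and absolute paths are placeholders for illustration):
```
var http = require('http');                  // core (native) module
var myMod = require('./mod');                // file module, relative path
var otherMod = require('/pathtomodule/mod'); // file module, absolute path
var express = require('express');            // non-native module resolved from node_modules
```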
# node.js 函数
在JavaScript中,一个函数可以作为另一个函数接收一个参数。我们可以先定义一个函数,然后传递,也可以在传递参数的地方直接定义函数。
Node.js中函数的使用与Javascript类似,举例来说,你可以这样做:
```
function say(word){
console.log(word)
};
function execute(method,val){
method(val)
};
execute(say,"hollo")
```
[function.js](function.js)
## 匿名函数
我们可以把一个函数作为变量传递。但是我们不一定要绕这个"先定义,再传递"的圈子,我们可以直接在另一个函数的括号中定义和传递这个函数:
```
function execute(method,val) {
method(val)
};
execute(function(word){console.log(word)},"hello");
```
[anonymity.js](anonymity.js)
## 函数传递是如何让HTTP服务器工作的
先看一下代码:
```
var http = require("http");
http.createServer(function(request, response) {
response.writeHead(200, {"Content-Type": "text/plain"});
response.write("Hello World");
response.end();
}).listen(8888);
```
现在它看上去应该清晰了很多:我们向 createServer 函数传递了一个匿名函数。
用这样的代码也可以达到同样的目的:
```
var http = require("http");
function onRequest(request, response) {
response.writeHead(200, {"Content-Type": "text/plain"});
response.write("Hello World");
response.end();
}
http.createServer(onRequest).listen(8888);
```
# node.js路由
我们要为路由提供请求的URL和其他需要的GET及POST参数,随后路由需要根据这些数据来执行相应的代码。
因此,我们需要查看HTTP请求,从中提取出请求的URL以及GET/POST参数。这一功能应当属于路由还是服务器(甚至作为一个模块自身的功能)确实值得探讨,但这里暂定其为我们的HTTP服务器的功能。
我们需要的所有数据都会包含在request对象中,该对象作为onRequest()回调函数的第一个参数传递。但是为了解析这些数据,我们需要额外的Node.JS模块,它们分别是url和querystring模块。
```
url.parse(string).query
|
url.parse(string).pathname |
| |
| |
------ -------------------
http://localhost:8888/start?foo=bar&hello=world
--- -----
| |
| |
querystring(string)["foo"] |
|
querystring(string)["hello"]
```
当然我们也可以用querystring模块来解析POST请求体中的参数,稍后会有演示。
现在我们来给onRequest()函数加上一些逻辑,用来找出浏览器请求的URL路径:
```
//server.js
var http=require("http");
var url=require("url");
function start(){
function onRequest(request,response){
var pathName=url.parse(request.url).pathname;
console.log("receive for"+pathName+"received");
response.writeHead(200,{"content-Type":"text/plain"});
response.write("hello word");
response.end();
}
http.createServer(onRequest).listen("8888");
console.log("server has start")
}
exports.start=start;
```
好了,我们的应用现在可以通过请求的URL路径来区别不同请求了--这使我们得以使用路由(还未完成)来将请求以URL路径为基准映射到处理程序上。
在我们所要构建的应用中,这意味着来自/start和/upload的请求可以使用不同的代码来处理。稍后我们将看到这些内容是如何整合到一起的。
现在我们可以来编写路由了,建立一个名为 router.js 的文件,添加以下内容:
```
function route (pathName){
console.log("Aboout to route requset for"+pathName)
}
exports.route=route;
```
[router.js](router.js)
如你所见,这段代码什么也没干,不过对于现在来说这是应该的。在添加更多的逻辑以前,我们先来看看如何把路由和服务器整合起来。
我们的服务器应当知道路由的存在并加以有效利用。我们当然可以通过硬编码的方式将这一依赖项绑定到服务器上,但是其它语言的编程经验告诉我们这会是一件非常痛苦的事,因此我们将使用依赖注入的方式较松散地添加路由模块。
首先,我们来扩展一下服务器的start()函数,以便将路由函数作为参数传递过去,server.js 文件代码如下
```
var http=require("http");
var url=require("url");
function start(route){
function onRequest(request,response){
var pathName=url.parse(request.url).pathname;
console.log("receive for"+pathName+"received");
route(pathName);
response.writeHead(200,{"content-Type":"text/plain"});
response.write("hello word");
response.end();
}
http.createServer(onRequest).listen("8888");
console.log("server has start")
}
exports.start=start;
```
[server.js](server.js)
同时,我们会相应扩展index.js,使得路由函数可以被注入到服务器中:
```
var route=require("./router.js");
var server=require("./server.js");
server.start(route.route)
```
[index.js](index.js)
在这里,我们传递的函数依旧什么也没做。
如果现在启动应用(node index.js,始终记得这个命令行),随后请求一个URL,你将会看到应用输出相应的信息,这表明我们的HTTP服务器已经在使用路由模块了,并会将请求的路径传递给路由:
启动index.js 并访问 http://127.0.0.1:8888/
# node.js 全局对象
JavaScript 中有一个特殊的对象,称为全局对象(Global Object),它及其所有属性都可以在程序的任何地方访问,即全局变量。
在浏览器 JavaScript 中,通常 window 是全局对象, 而 Node.js 中的全局对象是 global,所有全局变量(除了 global 本身以外)都是 global 对象的属性。
在 Node.js 我们可以直接访问到 global 的属性,而不需要在应用中包含它。
## 全局对象与全局变量
global 最根本的作用是作为全局变量的宿主。按照 ECMAScript 的定义,满足以下条 件的变量是全局变量:
在最外层定义的变量;
全局对象的属性;
隐式定义的变量(未定义直接赋值的变量)。
当你定义一个全局变量时,这个变量同时也会成为全局对象的属性,反之亦然。需要注 意的是,在 Node.js 中你不可能在最外层定义变量,因为所有用户代码都是属于当前模块的, 而模块本身不是最外层上下文。
注意: 永远使用 var 定义变量以避免引入全局变量,因为全局变量会污染 命名空间,提高代码的耦合风险。
## __filename
__filename 表示当前正在执行的脚本的文件名。它将输出文件所在位置的绝对路径,且和命令行参数所指定的文件名不一定相同。 如果在模块中,返回的值是模块文件的路径。
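For example, logging it from a script prints the script's absolute path (the path in the comment is only illustrative):
```
// main.js
console.log(__filename);
// prints something like /path/to/project/main.js, depending on where the file lives
```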
## __dirname
__dirname 表示当前执行脚本所在的目录。
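Likewise for the containing directory (illustrative output):
```
// main.js
console.log(__dirname);
// prints something like /path/to/project
```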
## setTimeout(cb, ms)
setTimeout(cb, ms) 全局函数在指定的毫秒(ms)数后执行指定函数(cb)。setTimeout() 只执行一次指定函数。
返回一个代表定时器的句柄值。
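A minimal example that simply prints a message after two seconds:
```
function printHello() {
    console.log("Hello, World!");
}
// run printHello once, two seconds from now
setTimeout(printHello, 2000);
```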
## clearTimeout(t)
clearTimeout( t ) 全局函数用于停止一个之前通过 setTimeout() 创建的定时器。 参数 t 是通过 setTimeout() 函数创建的定时器。
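For example, cancelling a pending timer before it has a chance to fire:
```
var timer = setTimeout(function () {
    console.log("this never runs");
}, 2000);
// cancel the timer created above
clearTimeout(timer);
```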
## setInterval(cb, ms)
setInterval(cb, ms) 全局函数在指定的毫秒(ms)数后执行指定函数(cb)。
返回一个代表定时器的句柄值。可以使用 clearInterval(t) 函数来清除定时器。
setInterval() 方法会不停地调用函数,直到 clearInterval() 被调用或窗口被关闭。
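For example, printing a counter every second and stopping after five ticks:
```
var count = 0;
var timer = setInterval(function () {
    count++;
    console.log("tick " + count);
    if (count === 5) {
        clearInterval(timer); // stop the repeating timer
    }
}, 1000);
```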
## console
console 用于提供控制台标准输出,它是由 Internet Explorer 的 JScript 引擎提供的调试工具,后来逐渐成为浏览器的事实标准。
Node.js 沿用了这个标准,提供与习惯行为一致的 console 对象,用于向标准输出流(stdout)或标准错误流(stderr)输出字符。
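The most commonly used methods are console.log, console.info and console.error, for example:
```
console.log("a normal message");        // written to stdout
console.error("something went wrong");  // written to stderr
```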
## process
process 是一个全局变量,即 global 对象的属性。
它用于描述当前Node.js 进程状态的对象,提供了一个与操作系统的简单接口。通常在你写本地命令行程序的时候,少不了要 和它打交道。下面将会介绍 process 对象的一些最常用的成员方法。
(注:比较难理解,日后再论)
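As a small, non-exhaustive sketch of what the process object exposes:
```
// command-line arguments: [node executable, script path, ...user arguments]
console.log(process.argv);
// current working directory and platform
console.log(process.cwd());
console.log(process.platform);
// exit explicitly with a success status code
process.exit(0);
```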
# node.js常用工具
util 是一个Node.js 核心模块,提供常用函数的集合,用于弥补核心JavaScript 的功能 过于精简的不足。
## util.inherits
util.inherits 是一个实现对象间原型继承的函数。
util.inherits用法如下:
```
var util=require("util");
function Base(){
this.name="base";
this.base=1991;
this.sayHello=function(){
console.log("hello "+this.name)
}
};
Base.prototype.showName=function(){
console.log(this.name)
}
function Sub(){
this.name="sub"
}
util.inherits(Sub,Base);
var objbase=new Base();
objbase.showName();
objbase.sayHello();
console.log(objbase);
var objsub=new Sub();
objsub.showName();
// objsub.sayHello();
console.log(objsub)
```
[utilinherits.js](util.inherits.js)
【注意:只会继承原型上的方法和属性,构造函数的方法和属性不会被继承!】
## util.inspect
util.inspect(object,[showHidden],[depth],[colors])是一个将任意对象转换 为字符串的方法,通常用于调试和错误输出。它至少接受一个参数 object,即要转换的对象。
showHidden 是一个可选参数,如果值为 true,将会输出更多隐藏信息。
depth 表示最大递归的层数,如果对象很复杂,你可以指定层数以控制输出信息的多 少。如果不指定depth,默认会递归2层,指定为 null 表示将不限递归层数完整遍历对象。 如果color 值为 true,输出格式将会以ANSI 颜色编码,通常用于在终端显示更漂亮 的效果。
特别要指出的是,util.inspect 并不会简单地直接把对象转换为字符串,即使该对 象定义了toString 方法也不会调用。
```
var util = require('util');
function Person() {
this.name = 'byvoid';
this.toString = function() {
return this.name;
};
}
var obj = new Person();
console.log(util.inspect(obj));
console.log(util.inspect(obj, true));
```
运行结果(输出格式因 Node 版本而异,大致类似;第二次调用因传入 true 会额外列出隐藏属性,输出更长):
```
{ name: 'byvoid', toString: [Function] }
```
## util.isArray(object)
如果给定的参数 "object" 是一个数组返回true,否则返回false。
```
var util = require('util');
util.isArray([])
// true
util.isArray(new Array)
// true
util.isArray({})
// false
```
## util.isRegExp(object)
如果给定的参数 "object" 是一个正则表达式返回true,否则返回false。
```
var util = require('util');
util.isRegExp(/some regexp/)
// true
util.isRegExp(new RegExp('another regexp'))
// true
util.isRegExp({})
// false
```
## util.isDate(object)
如果给定的参数 "object" 是一个日期返回true,否则返回false。
```
var util = require('util');
util.isDate(new Date())
// true
util.isDate(Date())
// false (without 'new' returns a String)
util.isDate({})
// false
```
## util.isError(object)
如果给定的参数 "object" 是一个错误对象返回true,否则返回false。
```
var util = require('util');
util.isError(new Error())
// true
util.isError(new TypeError())
// true
util.isError({ name: 'Error', message: 'an error occurred' })
// false
```
# Node.js 文件系统
Node.js 提供一组类似 UNIX(POSIX)标准的文件操作API。 Node 导入文件系统模块(fs)语法如下所示:
```
var fs = require("fs")
```
## 异步和同步
Node.js 文件系统(fs 模块)模块中的方法均有异步和同步版本,例如读取文件内容的函数有异步的 fs.readFile() 和同步的 fs.readFileSync()。
异步的方法函数最后一个参数为回调函数,回调函数的第一个参数包含了错误信息(error)。
建议大家是用异步方法,比起同步,异步方法性能更高,速度更快,而且没有阻塞。
## 实例
创建 input.txt 文件,内容如下:
```
晓军博客地址:www.wangxiaojun.top
```
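A minimal script reading that file with both APIs discussed above might look like this sketch:
```
var fs = require("fs");
// asynchronous read: the callback runs once the file content is available
fs.readFile("input.txt", function (err, data) {
    if (err) return console.error(err);
    console.log("Asynchronous read: " + data.toString());
});
// synchronous read: blocks until the whole file has been read
var data = fs.readFileSync("input.txt");
console.log("Synchronous read: " + data.toString());
console.log("程序执行完毕!");
```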
# Node.js GET/POST请求
在很多场景中,我们的服务器都需要跟用户的浏览器打交道,如表单提交。
表单提交到服务器一般都使用GET/POST请求。
本章节我们将为大家介绍 Node.js GET/POST请求。
## 获取get请求的内容
由于GET请求直接被嵌入在路径中,URL是完整的请求路径,包括了?后面的部分,因此你可以手动解析后面的内容作为GET请求的参数。
node.js中url模块中的parse函数提供了这个功能。
```
var http = require('http');
var url = require('url');
var util = require('util');
http.createServer(function(req, res){
res.writeHead(200, {'Content-Type': 'text/plain'});
res.end(util.inspect(url.parse(req.url, true)));
}).listen(3000);
```
## 获取POST请求内容
POST请求的内容全部的都在请求体中,http.ServerRequest并没有一个属性内容为请求体,原因是等待请求体传输可能是一件耗时的工作。
比如上传文件,而很多时候我们可能并不需要理会请求体的内容,恶意的POST请求会大大消耗服务器的资源,所有node.js默认是不会解析请求体的, 当你需要的时候,需要手动来做。
```
var http = require('http');
var querystring = require('querystring');
var util = require('util');
http.createServer(function(req, res){
var post = ''; //定义了一个post变量,用于暂存请求体的信息
req.on('data', function(chunk){ //通过req的data事件监听函数,每当接受到请求体的数据,就累加到post变量中
post += chunk;
});
req.on('end', function(){ //在end事件触发后,通过querystring.parse将post解析为真正的POST请求格式,然后向客户端返回。
post = querystring.parse(post);
res.end(util.inspect(post));
});
}).listen(3000);
```<file_sep>var fs=require("fs");
var zlib=require("zlib");
//压缩input.txt文件为 input.txt.gz
fs.createReadStream("input.txt")
.pipe(zlib.createGzip())
.pipe(fs.createWriteStream("input.txt.gz"));
console.log("程序执行完毕!")<file_sep>// module.js
var Hello=require("./hello");
var hello=new Hello();
hello.setName("wangxiaojun");
hello.sayHello();<file_sep>function route (pathName){
console.log("Aboout to route requset for"+pathName)
}
exports.route=route;<file_sep>function execute(method,val) {
method(val)
};
execute(function(word){console.log(word)},"hello");<file_sep>var buf=new Buffer(256);
var len=buf.write("www.wangxiaojun.top");
console.log("写入字节数:"+len)<file_sep>var util=require("util");
console.log(util.isRegExp(/some/));
console.log(util.isRegExp(new RegExp('some')));
console.log(util.isRegExp({}))<file_sep>var fs=require("fs");
var data="晓军博客地址:www.wangxiaojun.top";
//创建一个可写入的流到output.txt中
var writeStream = fs.createWriteStream("output.txt");
//使用utf8写入数据
writeStream.write(data,"UTF8");
//标记文件末尾
writeStream.end();
//处理事件流
writeStream.on("finish",function(){
console.log("写入完成")
})
writeStream.on("error",function(err){
console.log(err)
})
console.log("程序执行完毕")<file_sep># nodeguide
nodejs入门指南
<file_sep>var route=require("./router.js");
var server=require("./server.js");
server.start(route.route) | 67fca6164504547fedf04b8469d2def5de84a8b4 | [
"JavaScript",
"Markdown"
] | 11 | JavaScript | hairichuhe/nodeguide | 67b3dbdd6a0acd4c9d8be4ff5e61a1e719fe1276 | cbe6982ae75e15208e2e9cdd60286f310f76eb53 | |
refs/heads/master | <file_sep># mean-stack-reading-list
Proof of concept to create a personal Reading List using MEAN Stack, Gulp
<file_sep>readingItemsApp.controller('readingItemsCtrl', function($rootScope, $scope, readingItemsFactory) {
$scope.readingItems = [];
$scope.isEditable = [];
// get all readingItems on Load
readingItemsFactory.getReadingItems().then(function(data) {
$scope.readingItems = data.data;
});
// Save a readingItem to the server
$scope.save = function($event) {
if ($event.which == 13 && $scope.readingItemInput) {
readingItemsFactory.saveReadingItem({
"readingItem": $scope.readingItemInput,
"isCompleted": false
}).then(function(data) {
$scope.readingItems.push(data.data);
});
$scope.readingItemInput = '';
}
};
//update the status of the readingItem
$scope.updateStatus = function($event, _id, i) {
var cbk = $event.target.checked;
var _t = $scope.readingItems[i];
readingItemsFactory.updateReadingItem({
_id: _id,
isCompleted: cbk,
readingItem: _t.readingItem
}).then(function(data) {
if (data.data.ok && data.data.nModified) {
_t.isCompleted = cbk;
} else {
alert('Oops something went wrong!');
}
});
};
// Update the edited readingItem
$scope.edit = function($event, i) {
if ($event.which == 13 && $event.target.value.trim()) {
var _t = $scope.readingItems[i];
readingItemsFactory.updateReadingItem({
_id: _t._id,
readingItem: $event.target.value.trim(),
isCompleted: _t.isCompleted
}).then(function(data) {
if (data.data.ok && data.data.nModified) {
_t.readingItem = $event.target.value.trim();
$scope.isEditable[i] = false;
} else {
alert('Oops something went wrong!');
}
});
}
};
// Delete a readingItem
$scope.delete = function(i) {
readingItemsFactory.deleteReadingItem($scope.readingItems[i]._id).then(function(data) {
if (data.data) {
$scope.readingItems.splice(i, 1);
}
});
};
});<file_sep>{
"name": "mean-stack-reading-list",
"version": "1.0.0",
"description": "Reading List using MEAN Stack",
"main": "server.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"repository": {
"type": "git",
"url": "https://github.com/krishnavteja/mean-stack-reading-list.git"
},
"keywords": [
"MEAN-Stack"
],
"author": "<NAME>",
"license": "MIT",
"bugs": {
"url": "https://github.com/krishnavteja/mean-stack-reading-list/issues"
},
"homepage": "https://github.com/krishnavteja/mean-stack-reading-list",
"dependencies": {
"body-parser": "^1.13.1",
"cookie-parser": "^1.3.5",
"debug": "^2.2.0",
"ejs": "^2.3.1",
"express": "^4.13.0",
"mongojs": "^1.0.1",
"morgan": "^1.6.0"
},
"devDependencies": {
"gulp": "^3.9.0",
"gulp-jshint": "^1.11.0",
"gulp-livereload": "^3.8.0",
"gulp-nodemon": "^2.0.3",
"gulp-watch": "^4.2.4"
}
}
<file_sep>readingItemsApp = angular.module('readingItemsApp', ['ngRoute'])
.config(function($routeProvider) {
$routeProvider
.when('/', {
templateUrl: '/partials/readingItems.html',
controller: 'readingItemsCtrl'
}).otherwise({
redirectTo: '/'
});
}); | 390fca026605f488f056ab7ec2441f461195bb12 | [
"Markdown",
"JSON",
"JavaScript"
] | 4 | Markdown | krishnavteja/mean-stack-reading-list | 2e7acbed7b4ee73aa9fce3c0875035476bbbdb5f | 8df56f9ac2b90e254d79c7cce5a7df38febfb3a3 | |
refs/heads/master | <file_sep>package passphrase
//go:generate go run generate.go
import (
"bytes"
"crypto/rand"
"io"
"math/big"
"sync"
)
// MaxWorkerCount sets the concurrency limit for random number generation.
var MaxWorkerCount = 128
var wordsCount = int64(len(Words))
type result struct {
Number int64
Error error
}
func generateRandomNumber(maxSize int64, results chan result) {
bigInt, err := rand.Int(rand.Reader, big.NewInt(maxSize))
if err != nil {
results <- result{0, err}
return
}
results <- result{bigInt.Int64(), nil}
}
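// generateRandomNumbers generates count random numbers below maxSize and sends
// them to the results channel, using one goroutine per number for small batches
// and a bounded pool of MaxWorkerCount workers for larger ones.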
func generateRandomNumbers(maxSize int64, results chan result, count int) {
if count <= MaxWorkerCount {
for i := 0; i < count; i++ {
go generateRandomNumber(maxSize, results)
}
return
}
tasks := make(chan int)
var wg sync.WaitGroup
for worker := 0; worker < MaxWorkerCount; worker++ {
wg.Add(1)
go func() {
defer wg.Done()
for range tasks {
generateRandomNumber(maxSize, results)
}
}()
}
for i := 0; i < count; i++ {
tasks <- i
}
close(tasks)
wg.Wait()
}
// Write writes a passphrase with the given number of words.
func Write(writer io.Writer, numberOfWords int) (n int, err error) {
results := make(chan result)
go generateRandomNumbers(wordsCount, results, numberOfWords)
for i := 0; i < numberOfWords; i++ {
result := <-results
if result.Error != nil {
return n, result.Error
}
str := Words[result.Number]
if n != 0 {
str = " " + str
}
bytesWritten, err := io.WriteString(writer, str)
n += bytesWritten
if err != nil {
return n, err
}
}
return n, nil
}
// String returns a passphrase with the given number of words.
func String(numberOfWords int) (str string, err error) {
var buffer bytes.Buffer
_, err = Write(&buffer, numberOfWords)
if err != nil {
return "", err
}
return string(buffer.Bytes()), nil
}
<file_sep>FROM golang:alpine as build
WORKDIR /opt/passphrase
COPY . .
# ldflags explanation (see `go tool link`):
# -s disable symbol table
# -w disable DWARF generation
RUN cd ./passphrase && go build -ldflags="-s -w" -o /bin/passphrase
FROM scratch
COPY --from=build /bin/passphrase /bin/
USER 65534
ENTRYPOINT ["passphrase"]
<file_sep>#!/bin/sh
#
# Starts the given make target and a watch process for source file changes.
# Reloads the active Chrome/Safari/Firefox tab.
# Optionally executes a command on each source file change.
#
# Requires make for the command execution.
# Requires entr for the watch process.
#
# Usage: ./watch.sh target [chrome|safari|firefox] [-- cmd args...]
#
stop() {
STATUS=$?
kill "$PID"
exit $STATUS
}
start() {
make -s "$1" &
PID=$!
}
cd "$(dirname "$0")" || exit 1
trap stop INT TERM
start "$1"
shift
while true; do
find . -name '*.go' | entr -d -p ./reload-browser.sh "$@"
done
<file_sep>package parse_test
import (
"strconv"
"testing"
"github.com/blueimp/passphrase/internal/parse"
)
func TestNaturalNumber(t *testing.T) {
number := parse.NaturalNumber("")
if number != 0 {
t.Errorf(
"Failed to handle empty parameter, got: %d, expected: %d.",
number,
0,
)
}
number = parse.NaturalNumber(strconv.Itoa(parse.MaxInt) + "0")
if number != parse.MaxInt {
t.Errorf(
"Failed to handle int overflow, got: %d, expected: %d.",
number,
parse.MaxInt,
)
}
number = parse.NaturalNumber(strconv.Itoa(-parse.MaxInt-1) + "0")
if number != 0 {
t.Errorf(
"Failed to handle int underflow, got: %d, expected: %d.",
number,
0,
)
}
number = parse.NaturalNumber(strconv.Itoa(parse.MaxInt))
if number != parse.MaxInt {
t.Errorf(
"Failed to handle max int, got: %d, expected: %d.",
number,
parse.MaxInt,
)
}
number = parse.NaturalNumber("banana")
if number != 0 {
t.Errorf(
"Failed to handle non int string, got: %d, expected: %d.",
number,
0,
)
}
for i := 0; i <= 10; i++ {
number := parse.NaturalNumber(strconv.Itoa(i))
if number != i {
t.Errorf(
"Failed to handle positive number as parameter, got: %d, expected: %d.",
number,
i,
)
}
}
for i := -10; i < 0; i++ {
number := parse.NaturalNumber(strconv.Itoa(i))
if number != 0 {
t.Errorf(
"Failed to handle negative number as parameter, got: %d, expected: %d.",
number,
0,
)
}
}
for i := 0; i <= 10; i++ {
number := parse.NaturalNumber("", i)
if number != i {
t.Errorf(
"Failed to handle default number, got: %d, expected: %d.",
number,
i,
)
}
}
for i := 0; i <= 10; i++ {
number := parse.NaturalNumber("100", 0, i)
if number != i {
t.Errorf(
"Failed to respect max number, got: %d, expected: %d.",
number,
i,
)
}
}
for i := 0; i <= 10; i++ {
number := parse.NaturalNumber("-1", 0, parse.MaxInt, i)
if number != i {
t.Errorf(
"Failed to respect min number, got: %d, expected: %d.",
number,
i,
)
}
}
}
<file_sep>package strings
import (
"testing"
)
func TestInSlice(t *testing.T) {
list := []string{"apple", "banana", "coconut"}
if !InSlice("apple", list) {
t.Error("Failed to find string in list.")
}
if InSlice("orange", list) {
t.Error("Incorrectly found non-member string in list.")
}
}
<file_sep>module github.com/blueimp/passphrase
go 1.12
<file_sep>module github.com/blueimp/passphrase/appengine
require github.com/blueimp/passphrase v1.0.0
replace github.com/blueimp/passphrase v1.0.0 => ../
// Use alternative replace pattern to deploy to App Engine:
//replace github.com/blueimp/passphrase v1.0.0 => ./passphrase
<file_sep>package main
import (
"fmt"
"io/ioutil"
"os"
"strconv"
"strings"
"testing"
"github.com/blueimp/passphrase"
istrings "github.com/blueimp/passphrase/internal/strings"
)
func generatePassphrase(args []string) (code int, out string, err string) {
os.Args = append([]string{"noop"}, args...)
outReader, outWriter, _ := os.Pipe()
errReader, errWriter, _ := os.Pipe()
originalOut := os.Stdout
originalErr := os.Stderr
defer func() {
os.Stdout = originalOut
os.Stderr = originalErr
}()
os.Stdout = outWriter
os.Stderr = errWriter
exit = func(c int) {
code = c
}
func() {
main()
outWriter.Close()
errWriter.Close()
}()
stdout, _ := ioutil.ReadAll(outReader)
stderr, _ := ioutil.ReadAll(errReader)
return code, string(stdout), string(stderr)
}
func TestMain(t *testing.T) {
code, out, err := generatePassphrase([]string{})
if code != 0 {
t.Errorf("Unexpected status code, got %d, expected: %d.", code, 0)
}
if err != "" {
t.Errorf("Unexpected error output: %s.", err)
}
number := len(strings.Split(out, " "))
if number != defaultNumber {
t.Errorf(
"Incorrect default number of words, got: %d, expected: %d.",
defaultNumber,
number,
)
}
code, out, err = generatePassphrase([]string{"test"})
if code != 1 {
t.Errorf("Unexpected status code, got %d, expected: %d.", code, -1)
}
if err != "argument is not a natural number: test\n" {
t.Errorf("Expected \"not a natural number\" error, got: \"%s\"", err)
}
if out != "\n" {
t.Errorf("Expected empty passphrase, got: %s", out)
}
code, out, err = generatePassphrase([]string{"0"})
if code != 0 {
t.Errorf("Unexpected status code, got %d, expected: %d.", code, 0)
}
if err != "" {
t.Errorf("Unexpected error output: %s.", err)
}
if out != "\n" {
t.Errorf("Expected empty passphrase, got: %s", out)
}
for i := -1; i >= -10; i-- {
code, out, err := generatePassphrase([]string{strconv.Itoa(i)})
if code != 1 {
t.Errorf("Unexpected status code, got %d, expected: %d.", code, -1)
}
if err != fmt.Sprintf("argument is not a natural number: %d\n", i) {
t.Errorf("Expected \"not a natural number\" error, got: \"%s\"", err)
}
if out != "\n" {
t.Errorf("Expected empty passphrase, got: %s", out)
}
}
for i := 1; i <= 10; i++ {
code, out, err := generatePassphrase([]string{strconv.Itoa(i)})
if code != 0 {
t.Errorf("Unexpected status code, got %d, expected: %d.", code, 0)
}
if err != "" {
t.Errorf("Unexpected error output: %s.", err)
}
words := strings.Split(strings.TrimSuffix(out, "\n"), " ")
number := len(words)
if number != i {
t.Errorf("Incorrect number of words, got: %d, expected: %d.", number, i)
}
for _, word := range words {
if !istrings.InSlice(word, passphrase.Words[:]) {
t.Errorf("Passphrase word is not in the word list: %s", word)
}
if len(word) < passphrase.MinWordLength {
t.Errorf(
"Passphrase word is shorter than %d characters: %s",
passphrase.MinWordLength,
word,
)
}
}
}
}
<file_sep>package main
import (
"fmt"
"os"
"github.com/blueimp/passphrase"
"github.com/blueimp/passphrase/internal/parse"
)
const defaultNumber = 4
var exit = os.Exit
func main() {
var number int
if len(os.Args) > 1 {
arg := os.Args[1]
number = parse.NaturalNumber(arg, defaultNumber)
if number == 0 && arg != "0" {
fmt.Fprintln(os.Stderr, "argument is not a natural number:", arg)
exit(1)
}
} else {
number = defaultNumber
}
passphrase.MaxWorkerCount = parse.NaturalNumber(
os.Getenv("PASSPHRASE_MAX_WORKER_COUNT"),
passphrase.MaxWorkerCount,
parse.MaxInt,
1,
)
_, err := passphrase.Write(os.Stdout, number)
if err != nil {
fmt.Fprintln(os.Stderr, err)
exit(1)
}
fmt.Println()
}
<file_sep>#!/bin/sh
#
# Reloads the active tab of the given browser (defaults to Chrome).
# Keeps the browser window in the background (Chrome/Safari only).
# Can optionally execute a given command before reloading the browser tab.
# Browser reloading is supported on MacOS only for now.
#
# Usage: ./reload-browser.sh [chrome|safari|firefox] -- [command args...]
#
set -e
RELOAD_CHROME='tell application "Google Chrome"
reload active tab of window 1
end tell'
RELOAD_SAFARI='tell application "Safari"
set URL of document 1 to (URL of document 1)
end tell'
RELOAD_FIREFOX='activate application "Firefox"
tell application "System Events" to keystroke "r" using command down'
case "$1" in
firefox) OSASCRIPT=$RELOAD_FIREFOX;;
safari) OSASCRIPT=$RELOAD_SAFARI;;
*) OSASCRIPT=$RELOAD_CHROME;;
esac
if shift; then
[ "$1" = "--" ] && shift
"$@"
fi
if command -v osascript > /dev/null 2>&1; then
exec osascript -e "$OSASCRIPT"
fi
<file_sep>../../words.go<file_sep># --- Variables ---
# Include .env file if available:
-include .env
# The platform to use for local development and deployment.
# Can be either "appengine" or "lambda":
PLATFORM ?= appengine
# Fake AWS credentials as fix for AWS SAM Local issue #134:
# See also https://github.com/awslabs/aws-sam-local/issues/134
FAKE_AWS_ENV = AWS_ACCESS_KEY_ID=0 AWS_SECRET_ACCESS_KEY=0
# AWS CLI wrapped with aws-vault for secure credentials access,
# can be overriden by defining the AWS_CLI environment variable:
AWS_CLI ?= aws-vault exec '$(AWS_PROFILE)' -- aws
# The absolute path for the passphrase binary installation:
BIN_PATH = $(GOPATH)/bin/passphrase
# Dependencies to build the passphrase command-line interface:
CLI_DEPS = passphrase/cli.go passphrase/go.mod passphrase.go words.go
# Dependencies to build the lambda application:
LAMBDA_DEPS = lambda/lambda.go lambda/go.mod passphrase.go words.go
# --- Main targets ---
# The default target builds the CLI binary:
all: passphrase/passphrase
# Cross-compiles the lambda binary:
lambda: lambda/bin/main
# Generates the word list as go code:
words:
go generate
# Runs the unit tests for all components:
test: words.go
@go test ./...
@cd passphrase; go test ./...
@cd appengine; go test ./...
@cd lambda; go test ./...
# Installs the passphrase binary at $GOPATH/bin/passphrase:
install: $(BIN_PATH)
# Deletes the passphrase binary from $GOPATH/bin/passphrase:
uninstall:
rm -f $(BIN_PATH)
# Generates a sample lambda event:
event: lambda/event.json
# Invokes the lambda function locally:
invoke: lambda/event.json lambda/bin/main
cd lambda; $(FAKE_AWS_ENV) sam local invoke -e event.json
# Starts a local server for the given platform:
start: $(PLATFORM)-start
# Starts a local server for the given platform and a watch process:
watch: $(PLATFORM)-watch
# Deploys the project for the given platform:
deploy: $(PLATFORM)-deploy
# Opens a browser tab with the production URL of the App Engine project:
browse:
cd appengine; gcloud app browse --project $(PROJECT)
# Prints the API Gateway URL of the deployed lambda function:
url: lambda/passphrase.url
@grep -o 'https://.*' lambda/passphrase.url
# Deletes the CloudFormation stack of the lambda function:
destroy:
rm -f lambda/passphrase.url
$(AWS_CLI) cloudformation delete-stack --stack-name '$(STACK_NAME)'
# Removes all build artifacts:
clean:
rm -f \
lambda/bin/main \
lambda/debug \
lambda/debug.test \
lambda/deploy.yml \
lambda/deployed.txt \
lambda/event.json \
lambda/passphrase.url \
passphrase/debug \
passphrase/debug.test \
passphrase/passphrase
# --- Helper targets ---
# Defines phony targets (targets without a corresponding target file):
.PHONY: \
all \
passphrase \
lambda \
words \
test \
install \
uninstall \
event \
invoke \
start \
watch \
deploy \
browse \
url \
destroy \
appengine-start \
appengine-watch \
appengine-deploy \
lambda-start \
lambda-watch \
lambda-deploy \
clean
# Installs the passphrase binary at $GOPATH/bin/passphrase:
$(BIN_PATH): $(CLI_DEPS)
cd passphrase; go install
# Builds the passphrase binary:
passphrase/passphrase: $(CLI_DEPS)
cd passphrase; go build
# Generates the word list as go code if generate.go or words.txt change:
words.go: generate.go words.txt
go generate
# Starts a local App Engine server:
appengine-start:
cd appengine; dev_appserver.py .
# Starts a local App Engine server and a watch process for source file changes,
# on MacOS also automatically reloads the active Chrome/Safari/Firefox tab:
appengine-watch:
@exec ./watch.sh start $(BROWSER)
# Deploys the App Engine project to Google Cloud:
appengine-deploy:
cd appengine; gcloud app deploy --project $(PROJECT) --version $(VERSION)
# Starts a local API Gateway:
# Fake AWS credentials as fix for AWS SAM Local issue #134:
# See also https://github.com/awslabs/aws-sam-local/issues/134
lambda-start:
cd lambda; AWS_ACCESS_KEY_ID=0 AWS_SECRET_ACCESS_KEY=0 sam local start-api
# Starts a local API Gateway and a watch process for source file changes,
# on MacOS also automatically reloads the active Chrome/Safari/Firefox tab:
lambda-watch:
@exec ./watch.sh start $(BROWSER) -- make -s lambda
# Deploys the lambda function to AWS:
lambda-deploy: lambda/deployed.txt url
# Cross-compiles the lambda binary:
# ldflags explanation (see `go tool link`):
# -s disable symbol table
# -w disable DWARF generation
lambda/bin/main: $(LAMBDA_DEPS)
cd lambda; \
GOOS=linux GOARCH=amd64 go build -ldflags='-s -w' -o bin/main
# Generates a sample lambda event:
lambda/event.json:
cd lambda; sam local generate-event api > event.json
# Packages the lambda binary and uploads it to S3:
lambda/deploy.yml: lambda/bin/main lambda/template.yml
cd lambda; $(AWS_CLI) cloudformation package \
--template-file template.yml \
--s3-bucket '$(DEPLOYMENT_BUCKET)' \
--s3-prefix '$(DEPLOYMENT_PREFIX)' \
--output-template-file deploy.yml
# Deploys the packaged binary to AWS:
lambda/deployed.txt: lambda/deploy.yml
cd lambda; $(AWS_CLI) cloudformation deploy \
--template-file deploy.yml \
--stack-name '$(STACK_NAME)' \
--parameter-overrides LambdaRole='$(LAMBDA_ROLE)'
date >> lambda/deployed.txt
# Generates a passphrase.url file with the API Gateway URL:
lambda/passphrase.url:
API_GW_ID=$$($(AWS_CLI) cloudformation describe-stack-resource \
--stack-name '$(STACK_NAME)' \
--logical-resource-id ServerlessRestApi \
--query StackResourceDetail.PhysicalResourceId \
--output text \
) && \
printf '%s\nURL=https://%s.execute-api.$(AWS_REGION).amazonaws.com/Prod\n' \
[InternetShortcut] \
"$$API_GW_ID" \
> lambda/passphrase.url
<file_sep>../../go.mod<file_sep># Passphrase
> Better passwords by combining random words.
Passphrase is a Go library, command-line interface, Google App Engine
application and AWS Lambda function to generate a random sequence of words.
It is inspired by Randall Munroe's [xkcd webcomic #936](https://xkcd.com/936/)
with the title "Password Strength":

## Installation
The `passphrase` command-line interface can be installed via
[go get](https://golang.org/cmd/go/):
```sh
go get github.com/blueimp/passphrase/passphrase
```
## Usage
By default, `passphrase` prints four space-separated words, but also accepts
an argument for the number of words to generate:
```sh
passphrase [number]
```
The concurrency limit for random number generation (default: `128`) can be
adjusted with the environment variable `PASSPHRASE_MAX_WORKER_COUNT`:
```sh
PASSPHRASE_MAX_WORKER_COUNT=1000 passphrase 1000
```
## Import
The `passphrase` library can be imported and used with any type
implementing the [io.Writer interface](https://golang.org/pkg/io/#Writer), e.g.
`os.Stdout`:
```go
package main
import (
"fmt"
"os"
"github.com/blueimp/passphrase"
)
var exit = os.Exit
func main() {
_, err := passphrase.Write(os.Stdout, 4)
if err != nil {
fmt.Fprintln(os.Stderr, err)
exit(1)
}
fmt.Println()
}
```
Or alternatively with a simple `string` return value:
```go
package main
import (
"fmt"
"os"
"github.com/blueimp/passphrase"
)
var exit = os.Exit
func main() {
pass, err := passphrase.String(4)
if err != nil {
fmt.Fprintln(os.Stderr, err)
exit(1)
}
fmt.Println(pass)
}
```
## Word list
This repository includes the word list `google-10000-english-usa-no-swears.txt`
from Josh Kaufman's repository
[google-10000-english](https://github.com/first20hours/google-10000-english/),
but `passphrase` can also be compiled with another list of newline separated
words.
The words module can be generated the following way:
```sh
WORD_LIST_URL=words.txt MIN_WORD_LENGTH=3 make words
```
The `WORD_LIST_URL` variable can point to a URL or a local file path and falls
back to `words.txt`.
Words shorter than `MIN_WORD_LENGTH` (default: `3` characters) are filtered out.
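For example, to regenerate the module from a remote word list (the URL below is
just a placeholder) with a stricter minimum length, a call along these lines
should work:
```sh
WORD_LIST_URL=https://example.com/my-word-list.txt MIN_WORD_LENGTH=4 make words
```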
The updated word list module can then be used in a new build.
## Build
First, clone the project and then switch into its source directory:
```sh
git clone https://github.com/blueimp/passphrase.git
cd passphrase
```
*Please note:*
This project relies on [Go modules](https://github.com/golang/go/wiki/Modules)
for automatic dependency resolution.
To build the CLI binary, run
[Make](https://en.wikipedia.org/wiki/Make_\(software\)) in the repository:
```sh
make
```
The locally built binary can be installed at `$GOPATH/bin/passphrase` with the
following command:
```sh
make install
```
The uninstall command removes the binary from `$GOPATH/bin/passphrase`:
```sh
make uninstall
```
To clean up all build artifacts, run the following:
```sh
make clean
```
## Test
All components come with unit tests, which can be executed the following way:
```sh
make test
```
## Google App Engine
Passphrase can be deployed as a
[Google App Engine](https://cloud.google.com/appengine/docs/go/) application.
The application accepts a query parameter `n` to define the number of words to
generate, but limits the sequence to `100` words, e.g.:
```
https://PROJECT.appspot.com/?n=100
```
### Requirements
App engine development and deployment requires the
[Google Cloud SDK](https://cloud.google.com/appengine/docs/standard/go/download)
with the `app-engine-go` component.
On MacOS, `google-cloud-sdk` can be installed via
[Homebrew Cask](https://caskroom.github.io/).
```sh
brew cask install google-cloud-sdk
gcloud components install app-engine-go
```
To make `dev_appserver.py` available in the `PATH`, a symlink has to be added:
```sh
ln -s /usr/local/Caskroom/google-cloud-sdk/latest/google-cloud-sdk/bin/dev_* \
/usr/local/bin/
```
The local watch task requires [entr](https://bitbucket.org/eradman/entr) to be
installed, which is available in the repositories of popular Linux distributions
and can be installed on MacOS via [Homebrew](https://brew.sh/):
```sh
brew install entr
```
### Environment variables
The following variables have to be set, e.g. by adding them to a `.env` file,
which gets included in the provided `Makefile`:
```sh
# The App Engine project:
PROJECT=passphrasebot
# The App Engine project version:
VERSION=1
```
### Deploy
To deploy the application, execute the following:
```sh
make deploy
```
To open the URL of the application in a browser tab, run the following:
```sh
make browse
```
### Local development
To start a local App Engine server, run the following:
```sh
make start
```
On MacOS, to also automatically reload the active Chrome/Safari/Firefox tab, run
the following:
```sh
[BROWSER=chrome|safari|firefox] make watch
```
## AWS Lambda
Passphrase can be deployed as an [AWS Lambda](https://aws.amazon.com/lambda/)
function with an [API Gateway](https://aws.amazon.com/api-gateway/) trigger.
The function accepts a query parameter `n` to define the number of words to
generate, but limits the sequence to `100` words, e.g.:
```
https://API_GW_ID.execute-api.REGION.amazonaws.com/Prod?n=100
```
### Requirements
Deployment requires the [AWS CLI](https://aws.amazon.com/cli/) as well as
[aws-vault](https://github.com/99designs/aws-vault) for secure credentials
access.
Alternatively, it's also possible to override the wrapped `aws` CLI command by
exporting `AWS_CLI=aws` as an environment variable.
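For example, assuming your shell environment already provides valid AWS
credentials, something along these lines should work:
```sh
export AWS_CLI=aws
make deploy
```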
Local invocations require
[AWS SAM Local](https://github.com/awslabs/aws-sam-local).
The local watch task requires [entr](https://bitbucket.org/eradman/entr) to be
installed, which is available in the repositories of popular Linux distributions
and can be installed on MacOS via [Homebrew](https://brew.sh/):
```sh
brew install entr
```
### Environment variables
The following variables have to be set, e.g. by adding them to a `.env` file,
which gets included in the provided `Makefile`:
```sh
# Platform to use for local development and deployment (appengine or lambda):
PLATFORM=lambda
# The AWS profile to use for aws-vault:
AWS_PROFILE=default
# The S3 bucket where the lambda package can be uploaded:
DEPLOYMENT_BUCKET=example-bucket
# The S3 object prefix for the lambda package:
DEPLOYMENT_PREFIX=passphrase
# The CloudFormation stack name:
STACK_NAME=passphrase
# The name of an existing IAM role for AWS Lambda with
# AWSLambdaBasicExecutionRole attached:
LAMBDA_ROLE=arn:aws:iam::000000000000:role/aws-lambda-basic-execution-role
# The AWS service region, required to construct the API Gateway URL:
AWS_REGION=eu-west-1
```
### Build
To build the AWS Lambda function binary, run the following:
```sh
make lambda
```
### Deploy
To package and deploy the function, execute the following:
```sh
make deploy
```
After the deployment succeeds, the
[API Gateway](https://aws.amazon.com/api-gateway/) URL is printed.
This URL can also be retrieved later with the following command:
```sh
make url
```
To remove the AWS Lambda function and API Gateway configuration, execute the
following:
```sh
make destroy
```
### Local development
Using [AWS SAM Local](https://github.com/awslabs/aws-sam-local), the function
can also be invoked and served locally.
A sample API Gateway event can be generated the following way:
```sh
make event
```
To invoke the function locally, execute the following:
```sh
make invoke
```
To start the local API Gateway along with a watch process for source file
changes, run the following:
```sh
[BROWSER=chrome|safari|firefox] make watch
```
The watch task recompiles the lambda binary on changes.
On MacOS, it also automatically reloads the active Chrome/Safari/Firefox tab.
## License
Released under the [MIT license](https://opensource.org/licenses/MIT).
<file_sep>package main
import (
"fmt"
"log"
"net/http"
"os"
"github.com/blueimp/passphrase"
"github.com/blueimp/passphrase/internal/parse"
)
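// Default and maximum number of words per response; the "n" form value is
// clamped to this range by parse.NaturalNumber in indexHandler.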
const defaultNumber = 4
const maxNumber = 100
func indexHandler(response http.ResponseWriter, request *http.Request) {
number := parse.NaturalNumber(
request.FormValue("n"),
defaultNumber,
maxNumber,
)
response.Header().Set("cache-control", "private")
response.Header().Set("content-type", "text/plain;charset=utf-8")
response.Header().Set(
"strict-transport-security",
"max-age=31536000;includeSubDomains;preload",
)
response.Header().Set("x-content-type-options", "nosniff")
_, err := passphrase.Write(response, number)
if err != nil {
log.Println(err)
}
}
func main() {
port := os.Getenv("PORT")
if port == "" {
port = "8080"
}
http.HandleFunc("/", indexHandler)
log.Fatal(http.ListenAndServe(fmt.Sprintf(":%s", port), nil))
}
<file_sep>package passphrase_test
import (
"bytes"
"io/ioutil"
"strings"
"testing"
"github.com/blueimp/passphrase"
istrings "github.com/blueimp/passphrase/internal/strings"
)
func TestWrite(t *testing.T) {
var buffer bytes.Buffer
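	// Zero and negative word counts are expected to produce an empty passphrase.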
for i := 0; i > -10; i-- {
passphrase.Write(&buffer, i)
str := string(buffer.Bytes())
buffer.Reset()
if str != "" {
t.Errorf("Expected empty passphrase, got: %s", str)
}
}
for i := 1; i <= 10; i++ {
passphrase.Write(&buffer, i)
str := string(buffer.Bytes())
buffer.Reset()
words := strings.Split(str, " ")
number := len(words)
if number != i {
t.Errorf("Incorrect number of words, got: %d, expected: %d.", number, i)
}
for _, word := range words {
if !istrings.InSlice(word, passphrase.Words[:]) {
t.Errorf("Passphrase word is not in the word list: %s", word)
}
if len(word) < passphrase.MinWordLength {
t.Errorf(
"Passphrase word is shorter than %d characters: %s",
passphrase.MinWordLength,
word,
)
}
}
}
}
func TestString(t *testing.T) {
for i := 0; i > -10; i-- {
str, _ := passphrase.String(i)
if str != "" {
t.Errorf("Expected empty passphrase, got: %s", str)
}
}
for i := 1; i <= 10; i++ {
str, _ := passphrase.String(i)
words := strings.Split(str, " ")
number := len(words)
if number != i {
t.Errorf("Incorrect number of words, got: %d, expected: %d.", number, i)
}
for _, word := range words {
if !istrings.InSlice(word, passphrase.Words[:]) {
t.Errorf("Passphrase word is not in the word list: %s", word)
}
if len(word) < passphrase.MinWordLength {
t.Errorf(
"Passphrase word is shorter than %d characters: %s",
passphrase.MinWordLength,
word,
)
}
}
}
}
func benchmarkWrite(i int, b *testing.B) {
for n := 0; n < b.N; n++ {
passphrase.Write(ioutil.Discard, i)
}
}
func benchmarkString(i int, b *testing.B) {
for n := 0; n < b.N; n++ {
passphrase.String(i)
}
}
func BenchmarkWrite4(b *testing.B) { benchmarkWrite(4, b) }
func BenchmarkWrite16(b *testing.B) { benchmarkWrite(16, b) }
func BenchmarkWrite64(b *testing.B) { benchmarkWrite(64, b) }
func BenchmarkWrite256(b *testing.B) { benchmarkWrite(256, b) }
func BenchmarkWrite1024(b *testing.B) { benchmarkWrite(1024, b) }
func BenchmarkString4(b *testing.B) { benchmarkString(4, b) }
func BenchmarkString16(b *testing.B) { benchmarkString(16, b) }
func BenchmarkString64(b *testing.B) { benchmarkString(64, b) }
func BenchmarkString256(b *testing.B) { benchmarkString(256, b) }
func BenchmarkString1024(b *testing.B) { benchmarkString(1024, b) }
<file_sep>package main
import (
"strconv"
"strings"
"testing"
"github.com/aws/aws-lambda-go/events"
"github.com/blueimp/passphrase"
istrings "github.com/blueimp/passphrase/internal/strings"
)
func param(number int) map[string]string {
parameter := strconv.Itoa(number)
return map[string]string{"n": parameter}
}
func passphraseRequest(args map[string]string) events.APIGatewayProxyResponse {
response, err := Handler(&events.APIGatewayProxyRequest{
QueryStringParameters: args,
})
if err != nil {
panic(err)
}
return response
}
func TestHandler(t *testing.T) {
response := passphraseRequest(map[string]string{})
if response.StatusCode != 200 {
t.Errorf("Expected status code 200, got: %d", response.StatusCode)
}
if response.Headers["cache-control"] != "private" {
t.Errorf(
"Expected cache-control \"private\", got: \"%s\"",
response.Headers["cache-control"],
)
}
if response.Headers["content-type"] != "text/plain;charset=utf-8" {
t.Errorf(
"Expected content-type \"text/plain;charset=utf-8\", got: \"%s\"",
response.Headers["content-type"],
)
}
hsts := "max-age=31536000;includeSubDomains;preload"
if response.Headers["strict-transport-security"] != hsts {
t.Errorf(
"Expected strict-transport-security \"%s\", got: \"%s\"",
hsts,
response.Headers["strict-transport-security"],
)
}
if response.Headers["x-content-type-options"] != "nosniff" {
t.Errorf(
"Expected x-content-type-options \"nosniff\", got: \"%s\"",
response.Headers["x-content-type-options"],
)
}
number := len(strings.Split(response.Body, " "))
if number != defaultNumber {
t.Errorf(
"Incorrect default number of words, got: %d, expected: %d.",
			number,
			defaultNumber,
)
}
for i := 0; i >= -maxNumber; i-- {
response := passphraseRequest(param(i))
if response.Body != "" {
t.Errorf("Expected empty passphrase, got: %s", response.Body)
}
}
for i := 1; i <= maxNumber; i++ {
response := passphraseRequest(param(i))
words := strings.Split(response.Body, " ")
number := len(words)
if number != i {
t.Errorf("Incorrect number of words, got: %d, expected: %d.", number, i)
}
for _, word := range words {
if !istrings.InSlice(word, passphrase.Words[:]) {
t.Errorf("Passphrase word is not in the word list: %s", word)
}
if len(word) < passphrase.MinWordLength {
t.Errorf(
"Passphrase word is shorter than %d characters: %s",
passphrase.MinWordLength,
word,
)
}
}
}
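	// Requests above the maximum should be capped at maxNumber words.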
for i := maxNumber + 1; i <= maxNumber+11; i++ {
response := passphraseRequest(param(i))
words := strings.Split(response.Body, " ")
number := len(words)
if number != maxNumber {
t.Errorf(
"Incorrect number of words, got: %d, expected: %d.",
number,
maxNumber,
)
}
for _, word := range words {
if !istrings.InSlice(word, passphrase.Words[:]) {
t.Errorf("Passphrase word is not in the word list: %s", word)
}
if len(word) < passphrase.MinWordLength {
t.Errorf(
"Passphrase word is shorter than %d characters: %s",
passphrase.MinWordLength,
word,
)
}
}
}
}
<file_sep>package main
import (
"encoding/json"
"log"
"github.com/aws/aws-lambda-go/events"
"github.com/aws/aws-lambda-go/lambda"
"github.com/blueimp/passphrase"
"github.com/blueimp/passphrase/internal/parse"
)
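// Default and maximum number of words per response; the "n" query string
// parameter is clamped to this range by parse.NaturalNumber in Handler.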
const defaultNumber = 4
const maxNumber = 100
func logRequest(request *events.APIGatewayProxyRequest) {
encodedRequest, err := json.Marshal(request)
if err != nil {
log.Println("Error:", err)
} else {
log.Println("Request:", string(encodedRequest))
}
}
// Handler is the Lambda function handler.
func Handler(request *events.APIGatewayProxyRequest) (
events.APIGatewayProxyResponse,
error,
) {
logRequest(request)
number := parse.NaturalNumber(
request.QueryStringParameters["n"],
defaultNumber,
maxNumber,
)
pass, err := passphrase.String(number)
if err != nil {
return events.APIGatewayProxyResponse{}, err
}
return events.APIGatewayProxyResponse{
StatusCode: 200,
Headers: map[string]string{
"cache-control": "private",
"content-type": "text/plain;charset=utf-8",
"strict-transport-security": "max-age=31536000;includeSubDomains;preload",
"x-content-type-options": "nosniff",
},
Body: pass,
}, nil
}
func main() {
lambda.Start(Handler)
}
<file_sep>package main
import (
"io/ioutil"
"net/http"
"net/http/httptest"
"strconv"
"strings"
"testing"
"github.com/blueimp/passphrase"
istrings "github.com/blueimp/passphrase/internal/strings"
)
func param(number int) string {
return "/?n=" + strconv.Itoa(number)
}
func passphraseRequest(url string) (response http.Response, result string) {
request := httptest.NewRequest("GET", url, nil)
recorder := httptest.NewRecorder()
indexHandler(recorder, request)
response = *recorder.Result()
body, _ := ioutil.ReadAll(response.Body)
return response, string(body)
}
func TestPassphrase(t *testing.T) {
response, result := passphraseRequest("/")
if response.StatusCode != 200 {
t.Errorf("Expected status code 200, got: %d", response.StatusCode)
}
if response.Header.Get("cache-control") != "private" {
t.Errorf(
"Expected cache-control \"private\", got: \"%s\"",
response.Header.Get("cache-control"),
)
}
if response.Header.Get("content-type") != "text/plain;charset=utf-8" {
t.Errorf(
"Expected content-type \"text/plain;charset=utf-8\", got: \"%s\"",
response.Header.Get("content-type"),
)
}
hsts := "max-age=31536000;includeSubDomains;preload"
if response.Header.Get("strict-transport-security") != hsts {
t.Errorf(
"Expected strict-transport-security \"%s\", got: \"%s\"",
hsts,
response.Header.Get("strict-transport-security"),
)
}
if response.Header.Get("x-content-type-options") != "nosniff" {
t.Errorf(
"Expected x-content-type-options \"nosniff\", got: \"%s\"",
response.Header.Get("x-content-type-options"),
)
}
number := len(strings.Split(result, " "))
if number != defaultNumber {
t.Errorf(
"Incorrect default number of words, got: %d, expected: %d.",
			number,
			defaultNumber,
)
}
for i := 0; i >= -maxNumber; i-- {
_, result := passphraseRequest(param(i))
if result != "" {
t.Errorf("Expected empty passphrase, got: %s", response.Body)
}
}
for i := 1; i <= maxNumber; i++ {
_, result := passphraseRequest(param(i))
words := strings.Split(result, " ")
number := len(words)
if number != i {
t.Errorf("Incorrect number of words, got: %d, expected: %d.", number, i)
}
for _, word := range words {
if !istrings.InSlice(word, passphrase.Words[:]) {
t.Errorf("Passphrase word is not in the word list: %s", word)
}
if len(word) < passphrase.MinWordLength {
t.Errorf(
"Passphrase word is shorter than %d characters: %s",
passphrase.MinWordLength,
word,
)
}
}
}
for i := maxNumber + 1; i <= maxNumber+11; i++ {
_, result := passphraseRequest(param(i))
words := strings.Split(result, " ")
number := len(words)
if number != maxNumber {
t.Errorf(
"Incorrect number of words, got: %d, expected: %d.",
number,
maxNumber,
)
}
for _, word := range words {
if !istrings.InSlice(word, passphrase.Words[:]) {
t.Errorf("Passphrase word is not in the word list: %s", word)
}
if len(word) < passphrase.MinWordLength {
t.Errorf(
"Passphrase word is shorter than %d characters: %s",
passphrase.MinWordLength,
word,
)
}
}
}
}
<file_sep>module github.com/blueimp/passphrase/passphrase
require github.com/blueimp/passphrase v1.0.0
replace github.com/blueimp/passphrase v1.0.0 => ../
<file_sep>../../passphrase.go<file_sep>module github.com/blueimp/passphrase/lambda
require (
github.com/aws/aws-lambda-go v1.1.0
github.com/blueimp/passphrase v1.0.0
)
replace github.com/blueimp/passphrase v1.0.0 => ../
<file_sep>package parse
import (
"strconv"
)
// MaxInt is the maximum integer value on this platform.
const MaxInt = int(^uint(0) >> 1)
// NaturalNumber interprets the given string parameter as natural number.
// The given int args set default values and constraints:
// arg[0] => default number (defaults to 0)
// arg[1] => max number (defaults to MaxInt)
// arg[2] => min number (defaults to 0)
// The default number is returned if the parameter is empty.
// The max number is returned if the parameter exceeds its size.
// Zero is returned if the interpreted string is not a natural number.
// The default, max and min numbers are assumed to be natural numbers.
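// Illustrative examples (not part of the original source), for a call of the
// form NaturalNumber(param, defaultNumber, maxNumber):
//   NaturalNumber("", 4, 100)    // 4   (empty parameter => default)
//   NaturalNumber("7", 4, 100)   // 7
//   NaturalNumber("250", 4, 100) // 100 (capped at the max number)
//   NaturalNumber("-3", 4, 100)  // 0   (below the implicit min of 0)
//   NaturalNumber("abc", 4, 100) // 0   (not a natural number)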
func NaturalNumber(parameter string, args ...int) int {
var number int
var argsLength = len(args)
if parameter == "" {
if argsLength == 0 {
return 0
}
return args[0]
}
number, err := strconv.Atoi(parameter)
if err != nil {
numError, ok := err.(*strconv.NumError)
if ok && numError.Err == strconv.ErrRange && string(parameter[0]) != "-" {
number = MaxInt
} else {
number = 0
}
}
minNumber := 0
if argsLength > 2 {
minNumber = args[2]
}
if number < minNumber {
return minNumber
}
maxNumber := MaxInt
if argsLength > 1 {
maxNumber = args[1]
}
if number > maxNumber {
return maxNumber
}
return number
}
| dd1d33558a81e1023223ea851749a007313b496c | [
"Markdown",
"Makefile",
"Go",
"Go Module",
"Dockerfile",
"Shell"
] | 23 | Go | blueimp/passphrase | f4e42b62b824d5680100f6d360914bd948ee8657 | 8cbe3cb6186cad018047211d30574bac003d0f45 | |
refs/heads/master | <file_sep>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package personmanager.bll;
import java.util.ArrayList;
import java.util.List;
import personmanager.be.Person;
import personmanager.be.Teacher;
/**
*
* @author pgn
*/
public class Personmanager
{
private List<Person> persons;
public Personmanager()
{
persons = new ArrayList<>();
}
public Person getPerson(int id)
{
return null;
}
public void addPerson(Person p)
{
}
public void removePerson(int id)
{
}
public List<Person> getAllPersons()
{
return persons;
}
public List<Teacher> getAllTeachers()
{
return null;
}
} | 0ea23bc50f1cbe48a8cec299e914f9e488429574 | [
"Java"
] | 1 | Java | Spatzek/Personmanager | 810663c0fef15678628f39b4020a121d7e9cb4ff | 999f6b1323ab74e841c9dce2e284fc3f13dd3d6d | |
refs/heads/master | <repo_name>herpiko/kelasglib-mean-stack-II-angularjs<file_sep>/src/exercise_3/app.js
var app = angular.module('App', ['ui.router'])
.config(function($stateProvider){
$stateProvider
.state('Main', {
url : '/main',
templateProvider : function($templateCache) {
return $templateCache.get('main.html');
}
})
.state('About', {
url : '/about',
template : '<div>Hello</div>'
/* function($templateCache) { */
/* return $templateCache.get('about.html'); */
/* } */
})
})
.controller('MainController', function($rootScope, $scope){
$scope.title = 'Main Page';
$scope.body = 'Velit ut exercitationem quia et laborum ea. Autem ea est et sed. Molestiae vel non modi temporibus quo enim.';
})
.controller('AboutController', function($rootScope, $scope){
$scope.title = 'About Page';
$scope.body = 'Tentang kami, ...';
})
.run(['$state', function($state){
$state.go('Main');
}])
<file_sep>/src/exercise_5/app.js
var app = angular.module('App', ['ui.router']);
app.config(function($stateProvider){
$stateProvider
.state('Main', {
url : '/main',
templateProvider : function($templateCache) {
return $templateCache.get('main.html');
}
})
.state('New', {
url : '/new',
templateProvider : function($templateCache) {
return $templateCache.get('new.html');
}
})
.state('Detail', {
url : '/detail/{id}',
templateProvider : function($templateCache) {
return $templateCache.get('detail.html');
}
})
})
/* .directive('item', function(){ */
/* return function(sco */
/* }) */
.service('CrudService', function($http){
this.list = function() {
return $http.get('http://localhost:3000/folks');
}
this.get = function(id) {
return $http.get('http://localhost:3000/folks/' + id);
}
this.create = function(data) {
return $http.post('http://localhost:3000/folks', JSON.stringify(data));
}
this.delete = function(id) {
return $http.delete('http://localhost:3000/folks/' + id + '/edit');
}
this.update = function(data) {
return $http.put('http://localhost:3000/folks/' + data._id + '/edit', data);
}
})
.controller('MainController', function($rootScope, $scope, CrudService, $state){
$scope.title = 'Main Page';
$scope.list = [];
$scope.reload = function(){
CrudService.list()
.then(function(result){
$scope.list = result.data;
})
}
$scope.reload();
$scope.delete = function(id){
CrudService.delete(id)
.then(function(result) {
$scope.reload();
})
}
$scope.detail = function(id) {
$state.go('Detail', { id: id } )
}
})
.controller('NewController', function($rootScope, $scope, CrudService, $state){
$scope.title = 'New item';
$scope.save = function(data) {
CrudService.create(data)
.then(function(result){
$state.go('Main');
})
}
})
.controller('DetailController', function($rootScope, $scope, $stateParams, CrudService){
$scope.title = 'Detail';
var id = $stateParams.id;
CrudService.get(id)
.then(function(result){
$scope.data = result.data;
})
.catch(function(result){
// Handle error
console.log(result.data);
})
})
.run(['$state', function($state){
$state.go('Main');
}])
<file_sep>/src/exercise_4/app.js
var app = angular.module('App', ['ui.router'])
.config(function($stateProvider){
$stateProvider
.state('Main', {
url : '/main',
templateProvider : function($templateCache) {
return $templateCache.get('main.html');
}
})
.state('Todo', {
url : '/todo',
templateProvider : function($templateCache) {
return $templateCache.get('todo.html');
}
})
})
.controller('MainController', function($rootScope, $scope){
$scope.title = 'Main Page';
})
.controller('TodoController', function($rootScope, $scope){
$scope.list = [];
// $scope.list = ['ary', 'dendy']
$scope.listObj = [
{
nama : 'Ary',
usia : 20
},
{
nama : 'Irwan',
usia : 21
},
]
$scope.title = 'Todo List';
$scope.add = function(data) {
if (!data) {
return alert('Mana sih datanya');
}
$scope.list.push(data);
$scope.new = '';
}
$scope.delete = function(index) {
$scope.list.splice(index, 1);
}
})
.run(['$state', function($state){
$state.go('Todo');
}])
| 3451335570dec843e72f333424ed73adb96f6c91 | [
"JavaScript"
] | 3 | JavaScript | herpiko/kelasglib-mean-stack-II-angularjs | 6f0ad50af2434f4ffbdc26e9454bfe5d5fbe9270 | a4b5158fcc769671bae1cebb09486417b41e5e97 | |
refs/heads/master | <file_sep>from django.urls import path
from . import views
app_name = "lib"
urlpatterns = [
path("", views.index, name="index"),
# path(
# 'books/',
# BooksListView.as_view(),
# name='books-list'
# ),
# path(
# 'books/<int:pk>/',
# BookDetailView.as_view(),
# name='book-detail'
# ),
# path(
# 'authors/<int:pk>/',
# AuthorDetailView.as_view(),
# name='author-detail'
# ),
]
<file_sep>from django.db import models
# class Genre(models.Model):
# name = models.CharField(max_length=100)
# def __str__(self):
# return f'{self.name} genre'
# class Author(models.Model):
# name = models.CharField(max_length=300)
# pen_name = models.CharField(max_length=300)
# year_birth = models.IntegerField()
# year_death = models.IntegerField(null=True)
# genres = models.ManyToManyField(Genre)
# country = models.CharField(max_length=100)
# photo = models.ImageField(upload_to='authors/')
# biography = models.TextField(max_length=1000)
# def __str__(self):
# return f'{self.pen_name} ({self.name})'
# class Publisher(models.Model):
# name = models.CharField(max_length=200)
# country = models.CharField(max_length=100)
# def __str__(self):
# return f'{self.name} from {self.country}'
# class Book(models.Model):
# authors = models.ManyToManyField(Author)
# pages = models.IntegerField()
# year = models.IntegerField()
# publisher = models.ForeignKey(Publisher, related_name='books', on_delete=models.CASCADE)
# genres = models.ManyToManyField(Genre)
# title = models.CharField(max_length=300)
# description = models.TextField(max_length=1000)
# cover = models.ImageField(upload_to='books_covers/')
# def __str__(self):
# return f'{self.name} book'
<file_sep>Django==3.0.2
psycopg2-binary==2.8.4
pre-commit==2.1.0 | df21c4cc521b011378e98aaa348f3f4d5252a3e2 | [
"Python",
"Text"
] | 3 | Python | QueVege/library | 386371635498de4b5f3394d84e14e86e6d349786 | 7103a0c4d42baadbd415f42d6efd7dc9cff72312 | |
refs/heads/master | <repo_name>kasia948/Zad-10.1-zadanie-java-38<file_sep>/src/ArrayTest.java
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Arrays;
import java.util.Scanner;
public class ArrayTest {
public static void main(String[] args) throws IOException {
System.out.println("Ile liczb chcesz wprowadzić?");
Scanner input = new Scanner(System.in);
        int size = input.nextInt();
        double[] array = new double[size];
        for (int i = 0; i < array.length; i++) {
            System.out.println("Podaj liczbę zmiennoprzecinkową");
            array[i] = input.nextDouble();
}
FileWriter fileWriter = new FileWriter("wprowadzoneLiczby.txt");
BufferedWriter bfw = new BufferedWriter(fileWriter);
bfw.write(Arrays.toString(array));
bfw.close();
}
} | 4f54946990dc6e3397128a6a604c40f3a995c50c | [
"Java"
] | 1 | Java | kasia948/Zad-10.1-zadanie-java-38 | c1be8e1f5abb9119dc86db0b282e03e0407b82c6 | e4c4621d23d7c9ca61dca5d4b446d061fe7df06c | |
refs/heads/master | <file_sep>
This file, public/javascripts/asset.js, should be rendered
taking precedence over app/views/javascripts/asset.js.erb
<file_sep>require File.dirname(__FILE__) + '/../spec_helper'
describe ImagesController do
before do
add_fixture_views(ImagesController)
end
describe "when a matching file is found in app" do
it "renders the file" do
get "app_asset", :format => "txt"
response.should render_template('images/app_asset')
end
end
describe "when a matching file is not found" do
it "raises" do
lambda {
get "bogus", :format => "txt"
}.should raise_error
end
end
end<file_sep>namespace :desert_assets do
desc "Renders plugin-provided assets to RAILS_ROOT/public."
task :render_all => :environment do
Rake::Task[ "desert_assets:render_js" ].invoke
Rake::Task[ "desert_assets:render_css" ].invoke
end
desc "Renders plugin-provided app/views/javascripts/**/* files to public/javascripts."
task :render_js => :environment do
render('javascripts', 'js')
end
desc "Renders plugin-provided app/views/stylesheets/**/* files to public/stylesheets."
task :render_css => :environment do
render('stylesheets', 'css')
end
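  # Renders every template under each plugin's (and the app's) app/views/<dirname>
  # directory and writes the result to the matching path under RAILS_ROOT/public,
  # dropping the template engine suffix (e.g. .js.erb => .js).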
def render(dirname, extension)
Desert::Manager.plugins_and_app.each do |plugin|
load_path = "#{plugin.templates_path}/#{dirname}"
templates = Dir["#{load_path}/**/*"]
unless templates.empty?
templates.each do |template_path|
relative_path = template_path.gsub(plugin.templates_path, '')
target_path = File.join(RAILS_ROOT, 'public', relative_path.gsub(Regexp.new("\.#{extension}\.[a-z]+$"), ".#{extension}"))
view = ActionView::Base.new(File.join(RAILS_ROOT, 'app', 'views'))
content = view.render(relative_path)
File.open(target_path, 'w') {|f| f.write(content) }
end
end
end
end
end
<file_sep>ENV["RAILS_ENV"] = "test"
require File.expand_path(File.dirname(__FILE__) + "/../../../../config/environment")
require 'spec'
require 'spec/rails'
Spec::Runner.configure do |config|
config.fixture_path = "#{File.dirname(__FILE__)}/../spec/fixtures"
end
def add_fixture_views(controller_class)
controller_class.prepend_view_path(File.join(File.dirname(__FILE__), 'fixtures', 'app', 'views'))
controller_class.prepend_view_path(File.join(File.dirname(__FILE__), 'fixtures', 'public'))
end
<file_sep>class SpecSuite
def files
dir = File.dirname(__FILE__)
Dir["#{dir}/../spec/**/*_spec.rb"]
end
def run
dir = File.dirname(__FILE__)
ARGV.concat ["--options", "#{dir}/spec.opts"]
files.each do |file|
require file
end
result = ::Spec::Runner::CommandLine.run
exit result
end
end
if $0 == __FILE__
SpecSuite.new.run
end
<file_sep>class JavascriptsController < AssetsController
private
def set_headers
headers['Content-Type'] = 'text/javascript; charset=utf-8'
end
end
<file_sep>require File.expand_path(File.dirname(__FILE__) + '/../spec_helper')
describe JavascriptsController do
describe "route generation" do
it "should map #asset" do
route_for(:controller => "javascripts", :action => "asset", :format => "js").should == "/javascripts/asset.js"
end
end
describe "route recognition" do
it "should generate params for #asset" do
params_from(:get, "/javascripts/asset.js").should == {:controller => "javascripts", :action => "asset", :format => "js"}
end
end
end
<file_sep>require File.dirname(__FILE__) + '/../spec_helper'
describe JavascriptsController do
before do
add_fixture_views(JavascriptsController)
end
describe "when a matching file is found in app" do
it "renders the file" do
get "app_asset", :format => "js"
response.should render_template('javascripts/app_asset')
end
end
describe "when a matching file is not found" do
it "raises" do
lambda {
get "bogus", :format => "txt"
}.should raise_error
end
end
end
<file_sep>class ImagesController < AssetsController
def render_with_image_view(options = nil, extra_options = {}, &block)
params = request.parameters
format = params[:format]
options = {
:status => 200,
:filename => "#{params[:action]}.#{format}",
:disposition => 'inline',
:type => "image/#{format}"
}
data = render_to_string
send_file_headers! options.merge(:text => data, :length => data.size)
@performed_render = false
render_without_image_view options
end
alias_method_chain :render, :image_view
def render_to_string(options = nil, &block)
render_without_image_view(options, &block)
ensure
erase_render_results
forget_variables_added_to_assigns
reset_variables_added_to_assigns
end
end
<file_sep>images 'images/:action.:format', :controller => 'images'
javascripts 'javascripts/:action.:format', :controller => 'javascripts'
stylesheets 'stylesheets/:action.:format', :controller => 'stylesheets'
<file_sep>require File.dirname(__FILE__) + '/../spec_helper'
describe StylesheetsController do
before do
add_fixture_views(StylesheetsController)
end
describe "when a matching file is found in app" do
it "renders the file" do
get "app_asset", :format => "css"
response.should render_template('stylesheets/app_asset')
end
end
describe "when a matching file is not found" do
it "raises" do
lambda {
get "bogus", :format => "css"
}.should raise_error
end
end
end
<file_sep>= Desert Assets
== About Desert
Desert is a Rails plugin framework that makes it easy to share models, views,
controllers, helpers, routes, and migrations across your applications.
With Desert, reusability doesn't come at the cost of extensibility: it's trivial to extend
the functionality of a plugin - both in your application _and_ in other plugins.
Classes are automatically mixed in with your own or other plugins' classes.
This allows you to make full featured composable components.
Check it out: http://github.com/pivotal/desert
== About Desert Assets
Desert Assets allows your Desert plugin to provide assets (javascripts,
stylesheets, images) without copying them into the host application: asset
templates live under the plugin's app/views/javascripts, app/views/stylesheets
and app/views/images directories and are served through dedicated, page-cached
controllers, or pre-rendered into public/ with the provided rake tasks.
== Installation and Configuration
* Install and configure Desert
* Generate your Desert plugin
script/generate desert_plugin my_plugin
* Configure your plugin to depend on Desert Assets
# File: vendor/plugin/my_plugin/init.rb
require_plugin 'desert_assets'
* Configure your application to load routes from Desert Assets
# File: config/routes.rb
ActionController::Routing::Routes.draw do |map|
map.routes_from_plugin(:desert_assets)
# ...
end
== Usage
* Javascripts:
# Example file: vendor/plugins/my_plugin/app/views/javascripts/sahara.js.erb
function find_oasis() {
}
* Stylesheets (TODO):
* Images (TODO):
== Running Specs
In order to run any of the provided RSpec specs, you need a Rails application with
these plugins installed:
* desert
* desert_assets
* rspec
* rspec-rails
Copyright (c) 2010 Pivotal Labs. This software is licensed under the MIT License.
<file_sep>class AssetsController < ApplicationController
before_filter :set_headers
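  # Rendered assets are page-cached after the first request, so subsequent
  # requests can be served statically without hitting Rails.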
after_filter { |controller| controller.cache_page }
session :off
layout nil
private
def set_headers
# optionally override
end
end
| c1ed5aa98bbf5543a8d1ad25486776ccb33b76a0 | [
"JavaScript",
"RDoc",
"Ruby"
] | 13 | JavaScript | pivotalexperimental/desert_assets | e35e21d807ab1a8f9bf396203244419ed6dbe19a | 9806855fa3479d402b3cb462de9af5121accf28b | |
refs/heads/master | <repo_name>CheeryW/team-project-team24<file_sep>/app/src/test/java/com/cse110team24/walkwalkrevolution/StatsOnAnyRouteUnitTest.java
package com.cse110team24.walkwalkrevolution;
import android.app.Activity;
import android.content.Context;
import android.content.SharedPreferences;
import android.view.View;
import android.widget.TextView;
import com.cse110team24.walkwalkrevolution.activities.teams.TeamRoutesActivity;
import com.cse110team24.walkwalkrevolution.activities.userroutes.RouteDetailsActivity;
import com.cse110team24.walkwalkrevolution.firebase.firestore.observers.teams.TeamsDatabaseServiceObserver;
import com.cse110team24.walkwalkrevolution.firebase.firestore.observers.teams.TeamsRoutesObserver;
import com.cse110team24.walkwalkrevolution.firebase.firestore.services.DatabaseService;
import com.cse110team24.walkwalkrevolution.models.route.Route;
import com.cse110team24.walkwalkrevolution.models.route.WalkStats;
import com.cse110team24.walkwalkrevolution.models.user.IUser;
import com.cse110team24.walkwalkrevolution.utils.RoutesManager;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.robolectric.annotation.LooperMode;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
import androidx.recyclerview.widget.RecyclerView;
import androidx.test.core.app.ActivityScenario;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import junit.framework.TestCase;
import static com.cse110team24.walkwalkrevolution.HomeActivity.APP_PREF;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.ArgumentMatchers.anyString;
@RunWith(AndroidJUnit4.class)
@LooperMode(LooperMode.Mode.PAUSED)
public class StatsOnAnyRouteUnitTest extends TestInjection {
private TeamsDatabaseServiceObserver observer;
private List<Route> teamRoutesList;
private ActivityScenario<TeamRoutesActivity> teamRoutesScenario;
private ActivityScenario<RouteDetailsActivity> mRouteDetailsScenario;
SharedPreferences sp;
@Before
public void setup() {
super.setup();
sp = ApplicationProvider.getApplicationContext().getSharedPreferences(APP_PREF, Context.MODE_PRIVATE);
sp.edit().putString(IUser.USER_NAME_KEY, testUser.getDisplayName())
.putString(IUser.EMAIL_KEY, testUser.getEmail())
.putString(IUser.TEAM_UID_KEY, testUser.teamUid())
.commit();
Mockito.when(dsf.createDatabaseService(DatabaseService.Service.TEAMS)).thenReturn(teamsDatabaseService);
Mockito.doAnswer(invocation -> {
observer = invocation.getArgument(0);
return invocation;
}).when(teamsDatabaseService).register(any());
teamRoutesList = new ArrayList<>();
}
@Test
public void testCurrentUserCompletedTeammateRoute_currUserStatsAreDisplayed() {
teamRoutesList.clear();
setupTest();
launchTeamRoutesActivity();
teamRoutesScenario = ActivityScenario.launch(TeamRoutesActivity.class);
teamRoutesScenario.onActivity(activity -> {
getRecyclerViewCells(activity);
TestCase.assertEquals("666 steps", firstStepsTv.getText().toString());
});
}
@Test
public void testCurrentUserCompletedTeammateRoute_diffRouteHasCorrectStats() {
teamRoutesList.clear();
setupTest();
launchTeamRoutesActivity();
teamRoutesScenario = ActivityScenario.launch(TeamRoutesActivity.class);
teamRoutesScenario.onActivity(activity -> {
getRecyclerViewCells(activity);
TestCase.assertEquals("89 steps", secondStepsTv.getText().toString());
});
}
private void setupTest() {
setTestTeammateRoute();
teamRoutesList.add(testTeamRouteCompletedByUser);
teamRoutesList.add(testTeamRouteNotCompletedByUser);
setCurrentUserRoute();
saveTeamRouteForUser(testCurrentUserRoute);
}
Route testCurrentUserRoute;
WalkStats testCurrentUserStats;
private void setCurrentUserRoute() {
Calendar calendar = Calendar.getInstance();
calendar.set(1, 1, 1);
testCurrentUserStats = WalkStats.builder()
.addSteps(666)
.addDateCompleted(calendar)
.addTimeElapsed(666)
.addDistance(6.6)
.build();
testCurrentUserRoute = new Route.Builder("A Test Route")
.addRouteUid("1")
.addCreatorDisplayName("Teammate")
.addWalkStats(testCurrentUserStats)
.build();
}
WalkStats testTeammateStats;
Route testTeamRouteCompletedByUser;
Route testTeamRouteNotCompletedByUser;
private void setTestTeammateRoute() {
Calendar calendar = Calendar.getInstance();
calendar.set(1, 1, 1);
testTeammateStats = WalkStats.builder()
.addSteps(89)
.addDateCompleted(calendar)
.addTimeElapsed(89)
.addDistance(5.5)
.build();
testTeamRouteCompletedByUser = new Route.Builder("A Test Route")
.addRouteUid("1")
.addCreatorDisplayName("Teammate")
.addWalkStats(testTeammateStats)
.build();
testTeamRouteNotCompletedByUser = new Route.Builder("Other Route")
.addRouteUid("6")
.addCreatorDisplayName("Teammate")
.addWalkStats(testTeammateStats)
.build();
}
private void launchTeamRoutesActivity() {
Mockito.doAnswer(invocation -> {
((TeamsRoutesObserver) observer).onRoutesRetrieved(teamRoutesList, null);
return invocation;
}).when(teamsDatabaseService).getUserTeamRoutes(anyString(), anyString(), anyInt(), any());
}
private void saveTeamRouteForUser(Route route) {
try {
RoutesManager.writeSingle(route, route.getRouteUid(), ApplicationProvider.getApplicationContext());
} catch (IOException e) {
e.printStackTrace();
}
}
private TextView firstStepsTv;
private TextView secondStepsTv;
private void getRecyclerViewCells(Activity activity) {
RecyclerView recyclerView = activity.findViewById(R.id.recycler_view_team_routes);
View firstView = recyclerView.getLayoutManager().findViewByPosition(0);
View secondView = recyclerView.getLayoutManager().findViewByPosition(1);
firstStepsTv = firstView.findViewById(R.id.tv_routes_steps);
secondStepsTv = secondView.findViewById(R.id.tv_routes_steps);
}
}
<file_sep>/app/src/main/java/com/cse110team24/walkwalkrevolution/models/team/TeamAdapter.java
package com.cse110team24.walkwalkrevolution.models.team;
import com.cse110team24.walkwalkrevolution.models.user.IUser;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static com.cse110team24.walkwalkrevolution.models.user.FirebaseUserAdapter.UID_KEY;
public class TeamAdapter implements ITeam {
public static final String MEMBERS_KEY = "teamMembers";
private List<IUser> team;
private String uid;
public TeamAdapter(List<IUser> team) {
this.team = team;
}
@Override
public List<IUser> getTeam() {
return team;
}
@Override
public boolean addMember(IUser user) {
try {
team.add(user);
} catch(Exception e) {
e.printStackTrace();
return false;
}
return true;
}
@Override
public String getUid() {
return uid;
}
@Override
public String documentKey() {
return getUid();
}
@Override
public void setUid(String uid) {
this.uid = uid;
}
@Override
public Map<String, Object> teamData() {
Map<String, Object> teamData = new HashMap<>();
teamData.put(UID_KEY, uid);
teamData.put(MEMBERS_KEY, team);
return teamData;
}
}<file_sep>/app/src/main/java/com/cse110team24/walkwalkrevolution/firebase/firestore/observers/InvitationsDatabaseServiceObserver.java
package com.cse110team24.walkwalkrevolution.firebase.firestore.observers;
import com.cse110team24.walkwalkrevolution.models.invitation.Invitation;
import java.util.List;
public interface InvitationsDatabaseServiceObserver {
/**
* Called by the InvitationsDatabaseServiceSubject this observer is observing when the requested invitations
* list is ready to be read.
* @param invitations the requested pending invitations list.
*/
void onUserPendingInvitations(List<Invitation> invitations);
}
<file_sep>/app/src/main/java/com/cse110team24/walkwalkrevolution/models/route/WalkStats.java
package com.cse110team24.walkwalkrevolution.models.route;
import java.io.Serializable;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.text.NumberFormat;
import java.text.DecimalFormat;
public class WalkStats implements Serializable {
private long steps;
private long timeElapsed;
private double distance;
private Calendar dateCompleted;
public WalkStats(long steps, long timeElapsed, double distance, Calendar dateCompleted) {
this.steps = steps;
this.timeElapsed = timeElapsed;
this.distance = distance;
this.dateCompleted = dateCompleted;
}
private WalkStats() {
dateCompleted = Calendar.getInstance();
}
public long getSteps() {
return steps;
}
public void setSteps(long steps) {
this.steps = steps;
}
public long getTimeElapsed() {
return timeElapsed;
}
public void setTimeElapsed(long timeElapsed) {
this.timeElapsed = timeElapsed;
}
public double getDistance() {
return distance;
}
public void setDistance(double distance) {
this.distance = distance;
}
public Calendar getDateCompleted() {
return dateCompleted;
}
public void setDateCompleted(Calendar dateCompleted) {
this.dateCompleted = dateCompleted;
}
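    // timeElapsed is stored in milliseconds (exported as "elapsedTimeMillis" by
    // statsData), so convert ms -> minutes here.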
public double timeElapsedInMinutes() {
return timeElapsed / 1000.0 / 60;
}
@Override
public boolean equals(Object o) {
if (o instanceof WalkStats) {
WalkStats stats = (WalkStats) o;
return steps == stats.steps && timeElapsed == stats.timeElapsed &&
distance == stats.distance && Objects.equals(dateCompleted, stats.dateCompleted);
}
return false;
}
@Override
public String toString() {
return "\ndistance: " + formattedDistance() +
"\ntime: " + formattedTime() +
"\ndate completed: " + dateCompleted.getTime();
}
public String formattedDistance() {
return format(distance,"mile(s)");
}
public String formattedTime() {
return format(timeElapsedInMinutes(), "min.");
}
public String formattedDate() {
Date date = dateCompleted.getTime();
SimpleDateFormat sdf = new SimpleDateFormat("MM/dd", Locale.US);
return sdf.format(date);
}
private String format(double val, String suffix) {
NumberFormat format = new DecimalFormat("#0.00");
return String.format("%s %s", format.format(val), suffix);
}
public Map<String, Object> statsData() {
Map<String, Object> data = new HashMap<>();
data.put("steps", steps);
data.put("elapsedTimeMillis", timeElapsed);
data.put("distance", distance);
data.put("date", dateCompleted.getTime());
return data;
}
public static Builder builder() {
return new Builder();
}
public static class Builder {
WalkStats mWalkStats = new WalkStats();
public Builder addDateCompleted(Calendar calendar) {
mWalkStats.setDateCompleted(calendar);
return this;
}
public Builder addDistance(double distance) {
mWalkStats.setDistance(distance);
return this;
}
public Builder addTimeElapsed(long timeElapsed) {
mWalkStats.setTimeElapsed(timeElapsed);
return this;
}
public Builder addSteps(long steps) {
mWalkStats.setSteps(steps);
return this;
}
public WalkStats build() {
return mWalkStats;
}
}
}
<file_sep>/app/src/androidTest/java/com/cse110team24/walkwalkrevolution/CheckTeamScreenBDDEspressoTest.java
package com.cse110team24.walkwalkrevolution;
import android.content.Context;
import androidx.test.espresso.ViewInteraction;
import androidx.test.filters.LargeTest;
import androidx.test.runner.AndroidJUnit4;
import com.cse110team24.walkwalkrevolution.fitness.FitnessServiceFactory;
import com.cse110team24.walkwalkrevolution.mockedservices.MockActivityTestRule;
import com.cse110team24.walkwalkrevolution.mockedservices.TestAuth;
import com.cse110team24.walkwalkrevolution.mockedservices.TestFitnessService;
import com.cse110team24.walkwalkrevolution.models.team.TeamAdapter;
import com.cse110team24.walkwalkrevolution.models.user.FirebaseUserAdapter;
import com.cse110team24.walkwalkrevolution.models.user.IUser;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.util.ArrayList;
import java.util.List;
import static androidx.test.espresso.Espresso.onData;
import static androidx.test.espresso.Espresso.onView;
import static androidx.test.espresso.action.ViewActions.click;
import static androidx.test.espresso.action.ViewActions.closeSoftKeyboard;
import static androidx.test.espresso.action.ViewActions.replaceText;
import static androidx.test.espresso.assertion.ViewAssertions.matches;
import static androidx.test.espresso.matcher.ViewMatchers.hasDescendant;
import static androidx.test.espresso.matcher.ViewMatchers.isDisplayed;
import static androidx.test.espresso.matcher.ViewMatchers.withContentDescription;
import static androidx.test.espresso.matcher.ViewMatchers.withId;
import static androidx.test.espresso.matcher.ViewMatchers.withText;
import static com.cse110team24.walkwalkrevolution.mockedservices.TestFitnessService.TEST_SERVICE_KEY;
import static com.cse110team24.walkwalkrevolution.mockedservices.TestTeamsDatabaseService.testTeam;
import static com.cse110team24.walkwalkrevolution.mockedservices.TestTeamsDatabaseService.testTeamUid;
import static net.bytebuddy.matcher.ElementMatchers.is;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.anything;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.hasEntry;
import static org.hamcrest.Matchers.hasToString;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.core.StringStartsWith.startsWith;
/** Scenario: User has a team, sees team members listed in app
*
* Given that the user has a team,
* When they click on the "Team" button from the home screen
* Then a list of their fellow teammates will be displayed.
*/
@LargeTest
@RunWith(AndroidJUnit4.class)
public class CheckTeamScreenBDDEspressoTest {
private List<IUser> listOfUsers;
IUser amara_momoh;
IUser satta_momoh;
@Rule
public MockActivityTestRule<LoginActivity> mActivityTestRule = new MockActivityTestRule<>(LoginActivity.class);
@Before
public void setup() {
FitnessServiceFactory.put(TEST_SERVICE_KEY, activity -> new TestFitnessService(activity));
mActivityTestRule.getActivity().setFitnessServiceKey(TEST_SERVICE_KEY);
TestAuth.isTestUserSignedIn = true;
TestAuth.successUserSignedIn = true;
satta_momoh = FirebaseUserAdapter.builder()
.addDisplayName("<NAME>")
.addEmail("<EMAIL>")
.addUid("1")
.addTeamUid("666")
.build();
TestAuth.testAuthUser = satta_momoh;
listOfUsers = new ArrayList<IUser>();
testTeam = new TeamAdapter(listOfUsers);
amara_momoh = FirebaseUserAdapter.builder()
.addDisplayName("<NAME>")
.addEmail("<EMAIL>")
.addUid("2")
.addTeamUid("666")
.build();
testTeam.addMember(amara_momoh);
testTeamUid = "666";
androidx.test.platform.app.InstrumentationRegistry.getInstrumentation().getTargetContext()
.getSharedPreferences(HomeActivity.APP_PREF, Context.MODE_PRIVATE).edit().putString(amara_momoh.TEAM_UID_KEY, testTeamUid).commit();
}
@Test
public void checkTeamScreenEspressoTest() {
ViewInteraction appCompatEditText = onView(
allOf(withId(R.id.enter_gmail_address), isDisplayed()));
appCompatEditText.perform(replaceText("<EMAIL>"), closeSoftKeyboard());
ViewInteraction appCompatEditText2 = onView(
allOf(withId(R.id.enter_password), isDisplayed()));
appCompatEditText2.perform(replaceText("pretzel"), closeSoftKeyboard());
ViewInteraction appCompatEditText4 = onView(
allOf(withId(R.id.et_height_feet), isDisplayed()));
appCompatEditText4.perform(replaceText("5"), closeSoftKeyboard());
ViewInteraction appCompatEditText5 = onView(
allOf(withId(R.id.et_height_remainder_inches), isDisplayed()));
appCompatEditText5.perform(replaceText("4"), closeSoftKeyboard());
ViewInteraction appCompatButton = onView(
allOf(withId(R.id.btn_height_finish), withText("Login"), isDisplayed()));
appCompatButton.perform(click());
ViewInteraction bottomNavigationItemView = onView(
allOf(withId(R.id.action_team), withContentDescription("Team"), isDisplayed()));
bottomNavigationItemView.perform(click());
onView(withId(R.id.text_no_teammates)).check(matches(not(isDisplayed())));
onView(withId(R.id.list_members_in_team)).check(matches(isDisplayed()));
onData(anything())
.inAdapterView(withId(R.id.list_members_in_team))
.atPosition(0)
.check(matches(hasDescendant(
withText(containsString("Amara Momoh")))));
}
}
<file_sep>/app/src/main/java/com/cse110team24/walkwalkrevolution/activities/teams/TeammatesListViewAdapter.java
package com.cse110team24.walkwalkrevolution.activities.teams;
import android.content.Context;
import android.content.SharedPreferences;
import android.content.res.Resources;
import android.graphics.Color;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.ImageView;
import android.widget.TextView;
import com.cse110team24.walkwalkrevolution.R;
import com.cse110team24.walkwalkrevolution.models.team.walk.TeammateStatus;
import com.cse110team24.walkwalkrevolution.models.user.IUser;
import com.cse110team24.walkwalkrevolution.utils.Utils;
import java.util.List;
import java.util.Random;
import static com.google.common.base.Ascii.toUpperCase;
public class TeammatesListViewAdapter extends BaseAdapter {
private static final String TAG = "WWR_TeammatesListViewAdapter";
Context context;
List<IUser> users;
LayoutInflater inflater;
boolean showStatusIcons;
private SharedPreferences mPreferences;
public TeammatesListViewAdapter(Context context, List<IUser> users, SharedPreferences preferences) {
this.context = context;
this.users = users;
mPreferences = preferences;
inflater = (LayoutInflater.from(context));
}
@Override
public int getCount() {
return users.size();
}
@Override
public Object getItem(int i) {
return users.get(i);
}
@Override
public long getItemId(int i) {
return 0;
}
public void setShowStatusIcons(boolean showIcons) {
showStatusIcons = showIcons;
}
@Override
    public View getView(int i, View view, ViewGroup viewGroup) {
        // Inflate a new row only when the ListView has no recycled view to reuse,
        // but always rebind the row's data so recycled rows don't show stale teammates.
        View newView = (view == null)
                ? inflater.inflate(R.layout.item_teammate, viewGroup, false)
                : view;
        ImageView statusView = newView.findViewById(R.id.statusView);
        TextView nameView = newView.findViewById(R.id.nameView);
        TextView initialView = newView.findViewById(R.id.initialView);
        setStatusImage(statusView, users.get(i).getLatestWalkStatus());
        if (showStatusIcons) statusView.setVisibility(View.VISIBLE);
        String name = users.get(i).getDisplayName();
        nameView.setText(name);
        initialView.setText(Utils.getInitials(name, -1));
        setInitialsColor(initialView, name, i);
        return newView;
    }
private void setStatusImage(ImageView statusView, TeammateStatus status) {
if (status == null) return;
if (status == TeammateStatus.ACCEPTED)
statusView.setBackgroundResource(R.drawable.ic_check_green_24dp);
else if (status == TeammateStatus.DECLINED_NOT_INTERESTED)
statusView.setBackgroundResource(R.drawable.ic_close_black_24dp);
else if (status == TeammateStatus.DECLINED_SCHEDULING_CONFLICT)
statusView.setBackgroundResource(R.drawable.ic_event_busy_black_24dp);
else if (status == TeammateStatus.PENDING)
statusView.setBackgroundResource(R.drawable.ic_schedule_black_24dp);
}
/**
* makes the color for a teammate permanent by saving it to SharedPreferences
* @param initialsView TextView that holds the teammate's initials
* @param name the teammate's name
* @param idx the idx of the teammate in this adapter's data set
*/
private void setInitialsColor(TextView initialsView, String name, int idx) {
int savedColor = mPreferences.getInt(name, -1);
Log.d(TAG, "setInitialsColor: getting " + name + "'s color " + savedColor);
if (savedColor == -1) {
savedColor = Utils.generateRandomARGBColor(idx + 50);
Log.d(TAG, "setInitialsColor: generated new color " + savedColor);
mPreferences.edit().putInt(name, savedColor).apply();
}
initialsView.setTextColor(savedColor);
}
}
<file_sep>/app/src/main/java/com/cse110team24/walkwalkrevolution/MockActivity.java
package com.cse110team24.walkwalkrevolution;
import androidx.appcompat.app.AppCompatActivity;
import android.app.Activity;
import android.content.Intent;
import android.icu.text.SimpleDateFormat;
import android.os.Bundle;
import android.text.Editable;
import android.text.TextWatcher;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
import java.text.ParseException;
public class MockActivity extends AppCompatActivity {
private static final String TAG = "WWR_MockActivity";
private static final int ADD_MOCK_CONST = 500;
public static final String START_WALK_BTN_VISIBILITY_KEY = "start button";
public static final String ADDED_STEPS_KEY = "added_steps";
public static final String SETTING_START_TIME_KEY = "setting_start_time";
public static final String INPUT_TIME_KEY = "input_time";
public static final String TIME_FMT = "HH:mm:ss";
public static final int REQUEST_CODE = 6;
private long totalAddedSteps;
private boolean settingStartTime;
private Button stepsMockBtn;
private Button finishBtn;
private EditText inputtedTime;
private TextView totalStepsView;
private TextView enterTimePromptTv;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_mock);
getUIFields();
checkWhichTimeToSet();
setStepsMockOnClickListener();
setFinishBtnOnClickListener();
}
private void getUIFields() {
stepsMockBtn = findViewById(R.id.btn_increment_steps);
finishBtn = findViewById(R.id.btn_mock_finish);
finishBtn.setEnabled(false);
inputtedTime = findViewById(R.id.et_edit_time);
totalStepsView = findViewById(R.id.tv_added_steps);
enterTimePromptTv = findViewById(R.id.tv_enter_time);
TextWatcher textWatcher = getTextWatcher();
inputtedTime.addTextChangedListener(textWatcher);
}
private void setStepsMockOnClickListener() {
stepsMockBtn.setOnClickListener(view -> {
totalAddedSteps += ADD_MOCK_CONST;
totalStepsView.setText(String.valueOf(totalAddedSteps));
});
}
private void setFinishBtnOnClickListener() {
finishBtn.setOnClickListener(view -> {
finishMockActivity();
});
}
private TextWatcher getTextWatcher() {
return new TextWatcher() {
@Override
public void beforeTextChanged(CharSequence s, int start, int count, int after) {
}
@Override
public void onTextChanged(CharSequence s, int start, int before, int count) {
}
@Override
public void afterTextChanged(Editable s) {
finishBtn.setEnabled(validateTime());
}
};
}
private void finishMockActivity() {
Intent intent = new Intent()
.putExtra(ADDED_STEPS_KEY, totalAddedSteps)
.putExtra(SETTING_START_TIME_KEY, settingStartTime)
.putExtra(INPUT_TIME_KEY, inputtedTime.getText().toString());
setResult(Activity.RESULT_OK, intent);
Log.i(TAG, "finishMockActivity: mocked steps: " + totalAddedSteps);
finish();
}
private boolean validateTime() {
SimpleDateFormat sdf = new SimpleDateFormat(TIME_FMT);
sdf.setLenient(false);
try {
sdf.parse(inputtedTime.getText().toString());
} catch (ParseException e) {
return false;
}
return true;
}
private void checkWhichTimeToSet() {
settingStartTime = getIntent().getIntExtra(START_WALK_BTN_VISIBILITY_KEY, -1) == View.VISIBLE;
if (!settingStartTime) {
enterTimePromptTv.setText(R.string.enter_end_time);
}
}
}
<file_sep>/app/src/main/java/com/cse110team24/walkwalkrevolution/firebase/firestore/services/InvitationsDatabaseService.java
package com.cse110team24.walkwalkrevolution.firebase.firestore.services;
import com.cse110team24.walkwalkrevolution.firebase.firestore.subjects.InvitationsDatabaseServiceSubject;
import com.cse110team24.walkwalkrevolution.models.invitation.Invitation;
import com.cse110team24.walkwalkrevolution.models.invitation.InvitationStatus;
import com.cse110team24.walkwalkrevolution.models.user.IUser;
import com.google.android.gms.tasks.Task;
import java.util.List;
/**
* Handles provider database interactions with invitation-related documents and collections.
*/
public interface InvitationsDatabaseService extends InvitationsDatabaseServiceSubject, DatabaseService {
/**
* Create an invitation document for the receiving user's received invitations in
* this service's provider database.
*
* <p>Updates the invitation's ID to that of the newly created document</p>
* @param invitation the invitation being sent to the user.
* @return a task containing the result of trying to create the invitation document.
*/
Task<?> addInvitationForReceivingUser(Invitation invitation);
/**
* Create an invitation document for the sending user's sent invitations in
* this service's provider database.
* @param invitation the invitation being sent by the user.
* @return a task containing the result of trying to create the invitation document.
*/
Task<?> addInvitationForSendingUser(Invitation invitation);
/**
* Update the given invitation's document for the receiving user in this service's provider
* database.
* @param invitation the invitation that is being updated. Must have an invitation ID.
*/
void updateInvitationForReceivingUser(Invitation invitation);
/**
* Update the given invitation's document for the sending user in this service's provider
* database.
* @param invitation the invitation that is being updated. Must have an invitation ID.
*/
void updateInvitationForSendingUser(Invitation invitation);
/**
* Query this service's provider database for the given user's received invitations that have
* {@link InvitationStatus} equal to PENDING.
* <p>On complete, calls {@link InvitationsDatabaseServiceSubject#notifyObserversPendingInvitations(List)} to
* notify observers that the invitations are ready to read.</p>
* @param user the user whose pending invitations are being requested
*/
void getUserPendingInvitations(IUser user);
void addInvitationsSnapshotListener(IUser user);
}
<file_sep>/app/src/androidTest/java/com/cse110team24/walkwalkrevolution/mockedservices/MockActivityTestRule.java
package com.cse110team24.walkwalkrevolution.mockedservices;
import android.app.Activity;
import android.content.Context;
import androidx.test.rule.ActivityTestRule;
import com.cse110team24.walkwalkrevolution.HomeActivity;
import com.cse110team24.walkwalkrevolution.application.FirebaseApplicationWWR;
import com.cse110team24.walkwalkrevolution.models.user.IUser;
/**
* Activity test rule that wipes saved app data and swaps in the mocked Firebase service
* factories before the activity is launched, so each Espresso test starts from a clean state.
*
* <p>Declare it as the rule instance variable in your test class:</p>
* <pre>
* {@literal @Rule}
* {@code public MockActivityTestRule<LoginActivity> mActivityTestRule = new MockActivityTestRule<>(LoginActivity.class);}
* </pre>
* <p>If you need to change what happens before the activity is launched, extend this class and
* override {@link #beforeActivityLaunched()}.</p>
*/
public class MockActivityTestRule<T extends Activity> extends ActivityTestRule<T> {
FirebaseApplicationWWR testApplicationWWR;
public MockActivityTestRule(Class<T> activityClass) {
super(activityClass);
}
@Override
protected void beforeActivityLaunched() {
super.beforeActivityLaunched();
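// wipe persisted height, user, and team data so every test starts from a clean state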
androidx.test.platform.app.InstrumentationRegistry.getInstrumentation().getTargetContext()
.getSharedPreferences(HomeActivity.APP_PREF, Context.MODE_PRIVATE)
.edit()
.remove(HomeActivity.HEIGHT_FT_KEY)
.remove(HomeActivity.HEIGHT_IN_KEY)
.remove(IUser.TEAM_UID_KEY)
.remove(IUser.USER_NAME_KEY)
.remove(IUser.EMAIL_KEY)
.remove("357")
.apply();
FirebaseApplicationWWR.setAuthServiceFactory(new TestAuth.TestAuthFactory());
FirebaseApplicationWWR.setDatabaseServiceFactory(new TestDatabaseServiceFactory());
FirebaseApplicationWWR.setMessagingServiceFactory(new TestMessage.TestMessagingFactory());
}
}
<file_sep>/app/src/main/java/com/cse110team24/walkwalkrevolution/models/team/walk/TeammateStatus.java
package com.cse110team24.walkwalkrevolution.models.team.walk;
import java.util.HashMap;
import java.util.Map;
public enum TeammateStatus {
DECLINED_SCHEDULING_CONFLICT("declined the walk due to a scheduling conflict"),
DECLINED_NOT_INTERESTED("declined the walk because they're not interested"),
ACCEPTED("accepted the walk!"),
PENDING("hasn't responded");
private String mReason;
TeammateStatus(String reason) {
mReason = reason;
}
public String getReason() {
return mReason;
}
public Map<String, Object> dataInMapForm() {
Map<String, Object> data = new HashMap<>();
data.put("status", getReason());
return data;
}
public static TeammateStatus get(String value) {
return lookup.get(value);
}
private static final Map<String, TeammateStatus> lookup = new HashMap<>();
static {
for (TeammateStatus status : TeammateStatus.values()) {
lookup.put(status.getReason(), status);
}
}
}
<file_sep>/app/src/main/java/com/cse110team24/walkwalkrevolution/application/ApplicationObserver.java
package com.cse110team24.walkwalkrevolution.application;
public interface ApplicationObserver {
void onNewToken(String token);
}
<file_sep>/app/src/main/java/com/cse110team24/walkwalkrevolution/fitness/GoogleFitAdapter.java
package com.cse110team24.walkwalkrevolution.fitness;
import android.util.Log;
import com.cse110team24.walkwalkrevolution.HomeActivity;
import com.google.android.gms.auth.api.signin.GoogleSignIn;
import com.google.android.gms.auth.api.signin.GoogleSignInAccount;
import com.google.android.gms.fitness.Fitness;
import com.google.android.gms.fitness.FitnessOptions;
import com.google.android.gms.fitness.data.DataType;
import com.google.android.gms.fitness.data.Field;
public class GoogleFitAdapter implements FitnessService {
private static final String TAG = "WWR_GoogleFitAdapter";
private static final double STRIDE_LEN_CONST = 0.413;
private static final int FEET_IN_MILE = 5280;
private static final int INCHES_IN_FEET = 12;
private static final long MILLIS_IN_DAY = 86_400_000;
private final int GOOGLE_FIT_PERMISSIONS_REQUEST_CODE = System.identityHashCode(this) & 0xFFFF;
private GoogleSignInAccount account;
private HomeActivity activity;
private long recordingStartTime;
private long recordingEndTime;
private long recordingInitSteps;
private long updatedSteps;
private long stepsToAdd;
public GoogleFitAdapter(HomeActivity activity) {
this.activity = activity;
}
@Override
public int getRequestCode() {
return GOOGLE_FIT_PERMISSIONS_REQUEST_CODE;
}
@Override
public double getDistanceFromHeight(long steps, int heightFeet, float heightRemainderInches) {
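// estimate average stride length as 0.413 x height (a common heuristic), convert it to feet,
// and use it to turn the raw step count into miles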
double totalHeightInches = (INCHES_IN_FEET * heightFeet) + heightRemainderInches;
double avgStrideLen = (totalHeightInches * STRIDE_LEN_CONST) / INCHES_IN_FEET;
double stepsPerMile = FEET_IN_MILE / avgStrideLen;
return steps / stepsPerMile;
}
@Override
public void setStartRecordingTime(long startTime) {
recordingStartTime = startTime;
}
@Override
public void setEndRecordingTime(long endTime) {
recordingEndTime = endTime;
}
@Override
public void setStepsToAdd(long stepsToAdd) {
this.stepsToAdd += stepsToAdd;
}
@Override
public void setup() {
FitnessOptions fitnessOptions = FitnessOptions.builder()
.addDataType(DataType.TYPE_STEP_COUNT_DELTA, FitnessOptions.ACCESS_READ)
.addDataType(DataType.AGGREGATE_STEP_COUNT_DELTA, FitnessOptions.ACCESS_READ)
.build();
GoogleSignInAccount lastAcct = GoogleSignIn.getLastSignedInAccount(activity);
account = (lastAcct == null) ? GoogleSignIn.getAccountForExtension(activity, fitnessOptions) : lastAcct;
if (!GoogleSignIn.hasPermissions(account, fitnessOptions)) {
GoogleSignIn.requestPermissions(
activity,
GOOGLE_FIT_PERMISSIONS_REQUEST_CODE,
account,
fitnessOptions);
} else {
updateDailyStepCount();
}
}
@Override
public void updateDailyStepCount() {
if (account == null) {
return;
}
Fitness.getHistoryClient(activity, account)
.readDailyTotal(DataType.TYPE_STEP_COUNT_DELTA)
.addOnSuccessListener(dataSet -> {
updatedSteps = dataSet.isEmpty()
? 0 : dataSet.getDataPoints().get(0).getValue(Field.FIELD_STEPS).asInt();
Log.i(TAG, "updateDailyStepCount: successful steps update: " + updatedSteps);
// push the refreshed total once the asynchronous read actually completes
activity.setDailyStats(updatedSteps + stepsToAdd);
})
.addOnFailureListener(e ->
Log.e(TAG, "updateDailyStepCount: there was a problem getting the daily step count.", e)
);
// also update immediately so mocked steps show up without waiting for the async read;
// updatedSteps may still hold the previous value at this point
activity.setDailyStats(updatedSteps + stepsToAdd);
}
@Override
public void startRecording() {
updateDailyStepCount();
recordingInitSteps = updatedSteps + stepsToAdd;
}
@Override
public void stopRecording() {
long timeElapsed = recordingEndTime - recordingStartTime;
if (timeElapsed < 0) {
timeElapsed += MILLIS_IN_DAY;
}
updateDailyStepCount();
long totalSteps = updatedSteps + stepsToAdd - recordingInitSteps;
activity.setLatestWalkStats(totalSteps, timeElapsed);
}
}<file_sep>/README.md
# References
* The Espresso Tests use a TestRule adapted from [this SO post](https://stackoverflow.com/questions/37597080/reset-app-state-between-instrumentationtestcase-runs)
* The Google Fit API adapter, FitnessService, FitnessServiceFactory, and the testing FitnessService were all adapted from [lab 4](https://github.com/UCSD-CSE-110-2020/lab4-fitness)
* This website helped us learn to write unit tests with Robolectric: [Using Robolectric for Android unit testing on the JVM - Tutorial](https://www.vogella.com/tutorials/Robolectric/article.html)
* This website helped us learn to set up a RecyclerView: [Using the RecyclerView](https://guides.codepath.com/android/using-the-recyclerview) (accessed on February 4th, 2020)
* This website helped us learn how to animate views as they enter and exit: [Animating Android Activities and Views with Left and Right Slide Animations](https://kylewbanks.com/blog/left-and-right-slide-animations-on-android-activity-or-view) (accessed on February 10th, 2020)
* We learned to get and display date from Java Calendar object from [this SO post](https://stackoverflow.com/questions/3574811/how-can-i-get-a-date-from-my-calendar) (accessed on February 3rd, 2020)
Note: If not specified, the access date for each of the above references is between February 2, 2020 and February 16, 2020.
# TODO
1. We need more tests that repeat the same interactions several times, to make sure that toggling between views doesn't corrupt any state (see the sketch below).
2. If possible, write tests that actually close and reopen the app, to verify that persisted state is restored correctly.
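
As a starting point, a toggle test could look roughly like the sketch below. This is only a sketch: it assumes the same `MockActivityTestRule`, static Espresso imports, and mocked-services `@Before` setup already used in `ScheduleWalkFromProposedWalkEspressoTest`, all of which are elided here for brevity.

```java
public class ToggleTeamTabEspressoTest {
    @Rule
    public MockActivityTestRule<LoginActivity> mActivityTestRule =
            new MockActivityTestRule<>(LoginActivity.class);

    // @Before setup elided: configure TestAuth / TestTeamsDatabaseService and log in through
    // the UI exactly as in ScheduleWalkFromProposedWalkEspressoTest.

    @Test
    public void togglingTeamTabRepeatedlyStillShowsTeammates() {
        for (int i = 0; i < 3; i++) {
            // switch away to the Routes tab...
            onView(allOf(withId(R.id.action_routes_list), withContentDescription("Routes"), isDisplayed()))
                    .perform(click());
            // ...and back to the Team tab
            onView(allOf(withId(R.id.action_team), withContentDescription("Team"), isDisplayed()))
                    .perform(click());
            // the teammate list should still be populated every time
            onView(withId(R.id.list_members_in_team)).check(matches(isDisplayed()));
        }
    }
}
```
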
# Design Patterns in Practice
1. **Adapter**
- This is the most heavily used pattern. Every Google Fit or Firebase API is wrapped in an adapter that implements one of our own interfaces.
- Examples include `GoogleFitAdapter implements FitnessService`, which adapts Google's Fit API, and `FirebaseAuthAdapter implements AuthService`, which adapts Google's FirebaseAuth.
2. **Builder**
- Many of our data models have several optional member variables. The `interface Builder` and the interfaces that extend it let callers construct these models step by step (see the usage example below).
- Examples include `RouteBuilder implements IRouteBuilder` and `RouteEnvironmentBuilder implements IRouteEnvironmentBuilder`.
3. **Observer**
- Though used less extensively, this pattern smooths over the issues caused by the Google APIs' asynchronous network calls. By implementing `Subject` and the matching observer interfaces, the classes that adapt those APIs can notify the rest of the application when a call completes (see the sketch below).
- An example of this is `interface AuthService extends Subject<AuthServiceObserver>`.
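
In practice, Builder usage looks like this (the calls mirror how our tests construct users; the email address is a placeholder for illustration):

```java
IUser user = FirebaseUserAdapter.builder()
        .addDisplayName("Emulator User")
        .addEmail("user@example.com") // placeholder address, not a real account
        .addTeamUid("666")
        .build();
```

And here is a minimal, self-contained sketch of how the Observer pattern fits together. The class and method names below are illustrative only (they are not the app's real observers); the `register`/`deregister` shape mirrors our `Subject<TObserver>` interface in `utils/Subject.java`.

```java
import java.util.ArrayList;
import java.util.List;

// Illustrative observer -- not one of the app's real observer interfaces.
interface SignInObserver {
    void onSignedIn(String displayName);
}

// Mirrors the shape of Subject<TObserver> in utils/Subject.java.
interface SignInSubject {
    void register(SignInObserver observer);
    void deregister(SignInObserver observer);
}

// A service adapter wraps an async API and notifies its observers when the call completes.
class SketchAuthService implements SignInSubject {
    private final List<SignInObserver> observers = new ArrayList<>();

    @Override
    public void register(SignInObserver observer) { observers.add(observer); }

    @Override
    public void deregister(SignInObserver observer) { observers.remove(observer); }

    // In the real adapters this runs inside the async API's success callback.
    void completeSignIn(String displayName) {
        for (SignInObserver observer : observers) {
            observer.onSignedIn(displayName);
        }
    }
}

public class ObserverSketch {
    public static void main(String[] args) {
        SketchAuthService service = new SketchAuthService();
        service.register(name -> System.out.println(name + " signed in"));
        service.completeSignIn("Emulator User"); // prints "Emulator User signed in"
    }
}
```
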
<file_sep>/app/src/main/java/com/cse110team24/walkwalkrevolution/firebase/auth/AuthSubject.java
package com.cse110team24.walkwalkrevolution.firebase.auth;
import com.cse110team24.walkwalkrevolution.models.user.IUser;
import com.cse110team24.walkwalkrevolution.utils.Subject;
public interface AuthSubject extends Subject<AuthObserver> {
/**
* Notify this subject's observers that the user was signed in successfully.
* <p>See also: {@link AuthObserver#onUserSignedIn(IUser)}.</p>
* @param user the user that was signed in
*/
void notifyObserversSignedIn(IUser user);
/**
* Notify this subject's observers that the user was signed up successfully.
* <p>See also: {@link AuthObserver#onUserSignedUp(IUser)}.</p>
* @param user the user that was signed up
*/
void notifyObserversSignedUp(IUser user);
/**
* Notify this subject's observers that an AuthError was encountered during sign-in.
* <p>See also: {@link AuthObserver#onAuthSignInError(Auth.AuthError)}.</p>
* @param error the error that was encountered
*/
void notifyObserversSignInError(Auth.AuthError error);
/**
* Notify this subject's observers that an AuthError was encountered during sign-up.
* <p>See also: {@link AuthObserver#onAuthSignUpError(Auth.AuthError)}.</p>
* @param error the error that was encountered
*/
void notifyObserversSignUpError(Auth.AuthError error);
}
<file_sep>/functions/index.js
const functions = require('firebase-functions');
const admin = require('firebase-admin');
admin.initializeApp();
exports.sendInviteNotification = functions.firestore
.document('invitations/{userInvite}/received/{inviteId}')
.onCreate((snap, context) => {
const document = snap.exists ? snap.data() : null;
if (document) {
var message = {
notification: {
title: document.from.name + ' has invited you to join a team!',
body: 'Click to accept or decline this invitation'
},
topic: context.params.userInvite
};
return admin.messaging().send(message)
.then((response) => {
console.log('Successfully sent invite:', message);
return response;
})
.catch((error) => {
console.log('Error sending invite:', error);
return error;
});
}
return "document was null or empty";
});
exports.sendInvitationResponseNotification = functions.firestore
.document('invitations/{userInvite}/sent/{inviteId}')
.onUpdate((change, context) => {
const document = change.after.data();
if (document) {
var message = {
notification: {
title: document.to.name + ' has ' + document.status + ' your invitation!',
body: 'Click to see your team'
},
topic: context.params.userInvite
};
return admin.messaging().send(message)
.then((response) => {
console.log('Successfully sent invitation response notification:', message);
return response;
})
.catch((error) => {
console.log('Error sending invitation response notification:', error);
return error;
});
}
return "document was null or empty";
});
exports.sendNewTeammateNotification = functions.firestore
.document('teams/{team}/teammates/{teammate}')
.onCreate((snap, context) => {
const document = snap.exists ? snap.data() : null;
if (document) {
var message = {
notification: {
title: document.displayName + ' has joined your team!',
body: 'Click to see your team'
},
topic: context.params.team
};
return admin.messaging().send(message)
.then((response) => {
console.log('Successfully sent new team notification:', message);
return response;
})
.catch((error) => {
console.log('Error sending new team notification:', error);
return error;
});
}
return "document was null or empty";
});
exports.sendNewTeamWalkNotification = functions.firestore
.document('teams/{team}/teamWalks/{teamWalk}')
.onCreate((snap, context) => {
const document = snap.exists ? snap.data() : null;
if (document) {
var message = {
notification: {
title: document.proposedBy + ' has proposed a team walk',
body: 'Click to see your team'
},
topic: context.params.team
};
return admin.messaging().send(message)
.then((response) => {
console.log('Successfully sent new team walk notification:', message);
return response;
})
.catch((error) => {
console.log('Error sending new team walk notification:', error);
return error;
});
}
return "document was null or empty";
});
exports.sendUpdateTeamWalkNotification = functions.firestore
.document('teams/{team}/teamWalks/{teamWalk}')
.onUpdate((change, context) => {
const document = change.after.data();
if (document) {
var message = {
notification: {
title: document.proposedBy + ' has ' + document.status.toLowerCase() + ' a team walk',
body: 'Click to see your team'
},
topic: context.params.team
};
return admin.messaging().send(message)
.then((response) => {
console.log('Successfully sent team walk update notification:', message);
return response;
})
.catch((error) => {
console.log('Error sending team walk update notification:', error);
return error;
});
}
return "document was null or empty";
});
exports.sendTeammateUpdateWalkStatusNotification = functions.firestore
.document('teams/{team}/teamWalks/{teamWalk}/teammateStatuses/{teammateStatus}')
.onUpdate((change, context) => {
const document = change.after.data();
if (document) {
var statusString = document.status;
var messageTitle = statusString;
var messageBody = 'Click to see your team';
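// teammate statuses read like "accepted the walk!" or "declined the walk because ...";
// for declines, show just "declined" in the title and move the reason into the body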
var statusArr = statusString.split(' ');
if (statusArr[0] === 'declined') {
messageTitle = 'declined';
statusArr.shift();
messageBody = statusArr.join(' ');
}
var message = {
notification: {
title: document.displayName + ' has ' + messageTitle,
body: messageBody
},
topic: context.params.team
};
return admin.messaging().send(message)
.then((response) => {
console.log('Successfully sent teammate changed status for walk notification:', message);
return response;
})
.catch((error) => {
console.log('Error sending teammate changed status for walk notification:', error);
return error;
});
}
return "document was null or empty";
});
exports.sendTeammateChoseStatusNotification = functions.firestore
.document('teams/{team}/teamWalks/{teamWalk}/teammateStatuses/{teammateStatus}')
.onCreate((snap, context) => {
const document = snap.exists ? snap.data() : null;
if (document) {
var statusString = document.status;
var messageTitle = statusString;
var messageBody = 'Click to see your team';
var statusArr = statusString.split(' ');
if (statusArr[0] === 'declined') {
messageTitle = 'declined';
statusArr.shift();
messageBody = statusArr.join(' ');
}
var message = {
notification: {
title: document.displayName + ' has ' + messageTitle,
body: messageBody
},
topic: context.params.team
};
return admin.messaging().send(message)
.then((response) => {
console.log('Successfully sent teammate changed status for walk notification:', message);
return response;
})
.catch((error) => {
console.log('Error sending teammate changed status for walk notification:', error);
return error;
});
}
return "document was null or empty";
});<file_sep>/milestone1_tests/AddFromRoutesEspressoTest.java
package com.cse110team24.walkwalkrevolution;
import android.app.Activity;
import android.content.Context;
import android.content.SharedPreferences;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewParent;
import com.cse110team24.walkwalkrevolution.fitness.FitnessService;
import com.cse110team24.walkwalkrevolution.fitness.FitnessServiceFactory;
import com.cse110team24.walkwalkrevolution.fitness.GoogleFitAdapter;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hamcrest.TypeSafeMatcher;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import androidx.test.espresso.ViewInteraction;
import androidx.test.filters.LargeTest;
import androidx.test.rule.ActivityTestRule;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import static androidx.test.espresso.Espresso.onView;
import static androidx.test.espresso.action.ViewActions.click;
import static androidx.test.espresso.action.ViewActions.closeSoftKeyboard;
import static androidx.test.espresso.action.ViewActions.replaceText;
import static androidx.test.espresso.action.ViewActions.scrollTo;
import static androidx.test.espresso.assertion.ViewAssertions.matches;
import static androidx.test.espresso.matcher.ViewMatchers.isDisplayed;
import static androidx.test.espresso.matcher.ViewMatchers.withClassName;
import static androidx.test.espresso.matcher.ViewMatchers.withContentDescription;
import static androidx.test.espresso.matcher.ViewMatchers.withId;
import static androidx.test.espresso.matcher.ViewMatchers.withText;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.is;
@LargeTest
@RunWith(AndroidJUnit4.class)
public class AddFromRoutesEspressoTest {
/**
* Activity test rule that forcibly removes saved height data before the activity is launched.
* <p>Adapted from https://stackoverflow.com/questions/37597080/reset-app-state-between-instrumentationtestcase-runs</p>
* @param <T> the activity under test
*/
class AddFromRoutesTestRule<T extends Activity> extends ActivityTestRule<T> {
AddFromRoutesTestRule(Class<T> activityClass) {
super(activityClass);
}
@Override
protected void beforeActivityLaunched() {
super.beforeActivityLaunched();
androidx.test.platform.app.InstrumentationRegistry.getInstrumentation().getTargetContext()
.getSharedPreferences(HomeActivity.HEIGHT_PREF, Context.MODE_PRIVATE)
.edit()
.remove(HomeActivity.HEIGHT_FT_KEY)
.remove(HomeActivity.HEIGHT_IN_KEY)
.apply();
}
}
private static final String TEST_SERVICE = "TEST_SERVICE";
@Rule
public ActivityTestRule<LoginActivity> mActivityTestRule = new AddFromRoutesTestRule<>(LoginActivity.class);
private long nextStepCount;
@Before
public void setup() {
FitnessServiceFactory.put(TEST_SERVICE, new FitnessServiceFactory.BluePrint() {
@Override
public FitnessService create(HomeActivity activity) {
return new TestFitnessService(activity);
}
});
SharedPreferences.Editor edit = mActivityTestRule.getActivity().getSharedPreferences(HomeActivity.HEIGHT_PREF, Context.MODE_PRIVATE).edit();
edit.putFloat(HomeActivity.HEIGHT_IN_KEY, -1f);
edit.putInt(HomeActivity.HEIGHT_FT_KEY, -1);
edit.commit();
nextStepCount = 0;
mActivityTestRule.getActivity().setFitnessServiceKey(TEST_SERVICE);
androidx.test.platform.app.InstrumentationRegistry.getInstrumentation()
.getTargetContext()
.deleteFile(RoutesActivity.LIST_SAVE_FILE);
}
@Test
public void addFromRoutesEspressoTest() {
setup();
ViewInteraction appCompatEditText = onView(
allOf(withId(R.id.et_height_feet),
childAtPosition(
childAtPosition(
withId(android.R.id.content),
0),
1),
isDisplayed()));
appCompatEditText.perform(replaceText("5"), closeSoftKeyboard());
ViewInteraction appCompatEditText2 = onView(
allOf(withId(R.id.et_height_remainder_inches),
childAtPosition(
childAtPosition(
withId(android.R.id.content),
0),
3),
isDisplayed()));
appCompatEditText2.perform(replaceText("3"), closeSoftKeyboard());
ViewInteraction appCompatButton = onView(
allOf(withId(R.id.btn_height_finish), withText("Finish"),
childAtPosition(
childAtPosition(
withId(android.R.id.content),
0),
5),
isDisplayed()));
appCompatButton.perform(click());
ViewInteraction bottomNavigationItemView = onView(
allOf(withId(R.id.action_routes_list), withContentDescription("Routes"),
childAtPosition(
childAtPosition(
withId(R.id.bottom_navigation),
0),
1),
isDisplayed()));
bottomNavigationItemView.perform(click());
ViewInteraction floatingActionButton = onView(
allOf(withId(R.id.fab),
childAtPosition(
childAtPosition(
withId(android.R.id.content),
0),
1),
isDisplayed()));
floatingActionButton.perform(click());
ViewInteraction appCompatEditText3 = onView(
allOf(withId(R.id.et_save_route_title),
childAtPosition(
childAtPosition(
withClassName(is("android.widget.ScrollView")),
0),
1)));
appCompatEditText3.perform(scrollTo(), replaceText("Test"), closeSoftKeyboard());
ViewInteraction appCompatButton2 = onView(
allOf(withId(R.id.btn_save_route), withText("SAVE"),
childAtPosition(
childAtPosition(
withClassName(is("android.widget.ScrollView")),
0),
16)));
appCompatButton2.perform(scrollTo(), click());
ViewInteraction textView = onView(
allOf(withId(R.id.tv_route_name), withText("Test"), isDisplayed()));
textView.check(matches(withText("Test")));
ViewInteraction button = onView(
allOf(withId(R.id.btn_routes_favorite), isDisplayed()));
button.check(matches(isDisplayed()));
}
private static Matcher<View> childAtPosition(
final Matcher<View> parentMatcher, final int position) {
return new TypeSafeMatcher<View>() {
@Override
public void describeTo(Description description) {
description.appendText("Child at position " + position + " in parent ");
parentMatcher.describeTo(description);
}
@Override
public boolean matchesSafely(View view) {
ViewParent parent = view.getParent();
return parent instanceof ViewGroup && parentMatcher.matches(parent)
&& view.equals(((ViewGroup) parent).getChildAt(position));
}
};
}
private class TestFitnessService implements FitnessService {
private static final String TAG = "[TestFitnessService]: ";
private HomeActivity activity;
public TestFitnessService(HomeActivity activity) {
this.activity = activity;
}
@Override
public int getRequestCode() {
return 0;
}
@Override
public void setup() {
System.out.println(TAG + "setup");
}
@Override
public void updateDailyStepCount() {
System.out.println(TAG + "updateStepCount");
activity.setDailyStats(nextStepCount);
}
@Override
public void startRecording() {
}
@Override
public void stopRecording() {
}
@Override
public double getDistanceFromHeight(long steps, int heightFeet, float heightRemainderInches) {
return new GoogleFitAdapter(null).getDistanceFromHeight(steps, heightFeet, heightRemainderInches);
}
@Override
public void setStartRecordingTime(long startTime) {
}
@Override
public void setEndRecordingTime(long endTime) {
}
@Override
public void setStepsToAdd(long stepsToAdd) {
}
}
}
<file_sep>/app/src/main/java/com/cse110team24/walkwalkrevolution/firebase/firestore/services/TeamsDatabaseService.java
package com.cse110team24.walkwalkrevolution.firebase.firestore.services;
import com.cse110team24.walkwalkrevolution.firebase.firestore.observers.teams.TeamsTeamWalksObserver;
import com.cse110team24.walkwalkrevolution.firebase.firestore.subjects.TeamsDatabaseServiceSubject;
import com.cse110team24.walkwalkrevolution.models.route.Route;
import com.cse110team24.walkwalkrevolution.models.team.ITeam;
import com.cse110team24.walkwalkrevolution.models.team.walk.TeamWalk;
import com.cse110team24.walkwalkrevolution.models.team.walk.TeammateStatus;
import com.cse110team24.walkwalkrevolution.models.user.IUser;
import com.google.firebase.firestore.DocumentSnapshot;
import java.util.List;
/**
* Handles provider database interactions with team-related documents and collections.
*/
public interface TeamsDatabaseService extends TeamsDatabaseServiceSubject, DatabaseService {
/**
* Create a team document in this service's provider database.
* @param user the user whose team is being created.
* @return the newly created document's ID, to be used as the user's teamUid.
*/
String createTeamInDatabase(IUser user);
/**
* Updates the team document in this service's provider database to include the new user as a
* teammate.
* @param user the user who is being added as a teammate to the team
* @param teamUid the already-existing team's ID
*/
void addUserToTeam(IUser user, String teamUid);
/**
* Query this service's provider database for the teammates in the specified team document.
* All teammates' data are returned except the currently signed in user's.
* <p>On complete, a call to {@link TeamsDatabaseServiceSubject#notifyObserversTeamRetrieved(ITeam)} is made
* to notify observers that the team is ready.</p>
* @param teamUid the already-existing team's ID
* @param currentUserDisplayName the currently signed-in user's displayName
*/
void getUserTeam(String teamUid, String currentUserDisplayName);
/**
* Query this service's provider database for the routes in the specified team document, limited
* by the number of routes requested. All teammates' routes are returned except those of the currently
* signed in user.
* <p>On complete, a call to {@link TeamsDatabaseServiceSubject#notifyObserversTeamRoutesRetrieved(List, DocumentSnapshot)}
* is made containing up to the amount of requested routes and a DocumentSnapshot to the last document retrieved.</p>
* @param teamUid the already-existing team's ID
* @param currentUserDisplayName the currently signed in user's display name.
* @param routeLimitCount the maximum number of routes to query the database for. The returned list
* may contain up to this many routes.
* @param lastRoute the last retrieved route's document, used to determine where to start querying this
* service's provider database from, given the last request. Should be null when requesting
* routes for the first time.
*/
void getUserTeamRoutes(String teamUid, String currentUserDisplayName, int routeLimitCount, DocumentSnapshot lastRoute);
/**
* Creates a route document in this service's provider database for the given team's routes.
* @param teamUid the already-existing team's ID
* @param route the route whose document is being added to the specified team's routes
*/
void uploadRoute(String teamUid, Route route);
/**
* Updates a route document in this service's provider database for the given team's routes.
* @param teamUid the already-existing team's ID
* @param route the route whose document is being updated in the specified team's routes
*/
void updateRoute(String teamUid, Route route);
/**
* Update the current team walk in the database, or create it if it does not exist.
* @param teamWalk team walk that is being proposed, scheduled, cancelled, or withdrawn
* @return the team walk's uid whether it was created or updated
*/
String updateCurrentTeamWalk(TeamWalk teamWalk);
/**
* Query this service's provider database for up to teamWalkLimitCt team walks,
* in descending order by the timestamp of the day each walk was proposed. To obtain only the latest
* walk, pass a teamWalkLimitCt of 1.
*
* <p>On successful complete, all observers of type {@link TeamsTeamWalksObserver}
* are notified with a call to {@link TeamsDatabaseServiceSubject#notifyObserversTeamWalksRetrieved(List)}</p>
* @param teamUid the uid of the team whose walks are being requested
* @param teamWalkLimitCt the maximum number of team walks to query from the database.
*/
void getLatestTeamWalksDescendingOrder(String teamUid, int teamWalkLimitCt);
/**
* Update the specified user's status for a proposed or scheduled team walk to changedStatus.
* @param user the user whose status is being changed, must have a non-null documentKey.
* @param teamWalk the walk in which the user is changing their status
* @param changedStatus the new status.
*/
void changeTeammateStatusForLatestWalk(IUser user, TeamWalk teamWalk, TeammateStatus changedStatus);
/**
* Query the database for every teammate's status for the specified team walk.
* @param teamWalk the team walk whose statuses are being requested; must have a valid teamWalkUid.
* @param teamUid the teamUid of the teamWalk being requested.
*/
void getTeammateStatusesForTeamWalk(TeamWalk teamWalk, String teamUid);
}
<file_sep>/app/src/main/java/com/cse110team24/walkwalkrevolution/models/user/UserBuilder.java
package com.cse110team24.walkwalkrevolution.models.user;
import com.cse110team24.walkwalkrevolution.models.team.walk.TeammateStatus;
import com.cse110team24.walkwalkrevolution.utils.Builder;
import com.cse110team24.walkwalkrevolution.models.invitation.Invitation;
import java.util.List;
public interface UserBuilder extends Builder<IUser> {
UserBuilder addEmail(String email);
UserBuilder addInvitationsList(List<Invitation> invitations);
UserBuilder addDisplayName(String displayName);
UserBuilder addUid(String uid);
UserBuilder addTeamUid(String teamUid);
UserBuilder addLatestWalkStatus(TeammateStatus status);
}
<file_sep>/app/src/androidTest/java/com/cse110team24/walkwalkrevolution/ScheduleWalkFromProposedWalkEspressoTest.java
package com.cse110team24.walkwalkrevolution;
import android.os.IBinder;
import android.view.WindowManager;
import androidx.test.espresso.Root;
import androidx.test.espresso.ViewInteraction;
import androidx.test.espresso.matcher.ViewMatchers;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.LargeTest;
import com.cse110team24.walkwalkrevolution.activities.teams.InviteTeamToWalkActivity;
import com.cse110team24.walkwalkrevolution.fitness.FitnessServiceFactory;
import com.cse110team24.walkwalkrevolution.mockedservices.MockActivityTestRule;
import com.cse110team24.walkwalkrevolution.mockedservices.TestAuth;
import com.cse110team24.walkwalkrevolution.mockedservices.TestFitnessService;
import com.cse110team24.walkwalkrevolution.mockedservices.TestInvitationsDatabaseService;
import com.cse110team24.walkwalkrevolution.mockedservices.TestMessage;
import com.cse110team24.walkwalkrevolution.mockedservices.TestTeamsDatabaseService;
import com.cse110team24.walkwalkrevolution.mockedservices.TestUsersDatabaseService;
import com.cse110team24.walkwalkrevolution.models.invitation.Invitation;
import com.cse110team24.walkwalkrevolution.models.route.Route;
import com.cse110team24.walkwalkrevolution.models.team.TeamAdapter;
import com.cse110team24.walkwalkrevolution.models.team.walk.TeamWalk;
import com.cse110team24.walkwalkrevolution.models.team.walk.TeamWalkStatus;
import com.cse110team24.walkwalkrevolution.models.user.FirebaseUserAdapter;
import com.cse110team24.walkwalkrevolution.models.user.IUser;
import com.google.firebase.Timestamp;
import org.hamcrest.Description;
import org.hamcrest.TypeSafeMatcher;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.TreeMap;
import static androidx.test.espresso.Espresso.onData;
import static androidx.test.espresso.Espresso.onView;
import static androidx.test.espresso.Espresso.pressBack;
import static androidx.test.espresso.action.ViewActions.click;
import static androidx.test.espresso.action.ViewActions.closeSoftKeyboard;
import static androidx.test.espresso.action.ViewActions.replaceText;
import static androidx.test.espresso.action.ViewActions.scrollTo;
import static androidx.test.espresso.action.ViewActions.swipeUp;
import static androidx.test.espresso.assertion.ViewAssertions.matches;
import static androidx.test.espresso.contrib.RecyclerViewActions.actionOnItemAtPosition;
import static androidx.test.espresso.matcher.CursorMatchers.withRowString;
import static androidx.test.espresso.matcher.RootMatchers.withDecorView;
import static androidx.test.espresso.matcher.ViewMatchers.isDisplayed;
import static androidx.test.espresso.matcher.ViewMatchers.withContentDescription;
import static androidx.test.espresso.matcher.ViewMatchers.withEffectiveVisibility;
import static androidx.test.espresso.matcher.ViewMatchers.withId;
import static androidx.test.espresso.matcher.ViewMatchers.withText;
import static com.cse110team24.walkwalkrevolution.mockedservices.TestFitnessService.TEST_SERVICE_KEY;
import static com.cse110team24.walkwalkrevolution.mockedservices.TestTeamsDatabaseService.testTeam;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.anything;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
public class ScheduleWalkFromProposedWalkEspressoTest {
@Rule
public MockActivityTestRule<LoginActivity> mActivityTestRule = new MockActivityTestRule<>(LoginActivity.class);
//public MockActivityTestRule<InviteTeamToWalkActivity> mActivityInviteTeamToWalk = new MockActivityTestRule<>(InviteTeamToWalkActivity.class);
@Before
public void setup() {
FitnessServiceFactory.put(TEST_SERVICE_KEY, activity -> new TestFitnessService(activity));
mActivityTestRule.getActivity().setFitnessServiceKey(TEST_SERVICE_KEY);
TestAuth.isTestUserSignedIn = true;
TestAuth.successUserSignedIn = true;
TestUsersDatabaseService.testCurrentUserData = new HashMap<>();
TestUsersDatabaseService.testCurrentUserData.put("displayName", "Emulator User");
TestUsersDatabaseService.testCurrentUserData.put("email", "<EMAIL>");
TestUsersDatabaseService.testCurrentUserData.put("teamUid", "666");
TestAuth.testAuthUser = FirebaseUserAdapter.builder()
.addDisplayName("Emulator User")
.addEmail("<EMAIL>")
.addTeamUid("666")
.build();
TestTeamsDatabaseService.testTeamRoutes = new ArrayList<>();
TestTeamsDatabaseService.testTeamRoutes.add(new Route.Builder("Title").addCreatorDisplayName("Emulator User").build());
TestTeamsDatabaseService.testTeam = new TeamAdapter(new ArrayList<>());
IUser userOne = FirebaseUserAdapter.builder()
.addDisplayName("User 1")
.addEmail("<EMAIL>")
.addTeamUid("666")
.addUid("1")
.build();
IUser userTwo = FirebaseUserAdapter.builder()
.addDisplayName("User 2")
.addEmail("<EMAIL>")
.addTeamUid("666")
.addUid("1")
.build();
TestTeamsDatabaseService.testTeam.addMember(userOne);
TestTeamsDatabaseService.testTeam.addMember(userTwo);
SimpleDateFormat sdf = new SimpleDateFormat("MM/dd/yyyy hh:mm a");
Date date = null;
try {
date = sdf.parse("04/20/2020 4:20 PM");
} catch (ParseException e) {
e.printStackTrace();
}
TestTeamsDatabaseService.testTeamStatuses = new TreeMap<>();
TestTeamsDatabaseService.testTeamStatuses.put("User 1", "accepted the walk!");
TestTeamsDatabaseService.testTeamStatuses.put("User 2", "accepted the walk!");
TestTeamsDatabaseService.testTeamWalks = new ArrayList<>();
TestTeamsDatabaseService.testTeamWalks.add(TeamWalk.builder()
.addProposedRoute(new Route.Builder("Title").addCreatorDisplayName("Emulator User").build())
.addProposedBy("Emulator User")
.addProposedDateAndTime(new Timestamp(date))
.build());
}
@Test
public void scheduleWalkFromProposedWalk() {
setup();
ViewInteraction appCompatEditText = onView(
allOf(withId(R.id.enter_gmail_address), isDisplayed()));
appCompatEditText.perform(replaceText("<EMAIL>"), closeSoftKeyboard());
ViewInteraction appCompatEditText2 = onView(
allOf(withId(R.id.enter_password), isDisplayed()));
appCompatEditText2.perform(replaceText("1234jam"), closeSoftKeyboard());
ViewInteraction appCompatEditText4 = onView(
allOf(withId(R.id.et_height_feet), isDisplayed()));
appCompatEditText4.perform(replaceText("5"), closeSoftKeyboard());
ViewInteraction appCompatEditText5 = onView(
allOf(withId(R.id.et_height_remainder_inches), isDisplayed()));
appCompatEditText5.perform(replaceText("7"), closeSoftKeyboard());
ViewInteraction appCompatButton = onView(
allOf(withId(R.id.btn_height_finish), withText("Login"), isDisplayed()));
appCompatButton.perform(click());
ViewInteraction bottomNavigationItemView = onView(
allOf(withId(R.id.action_team), withContentDescription("Team"), isDisplayed()));
bottomNavigationItemView.perform(click());
ViewInteraction appCompatButton1 = onView(
allOf(withId(R.id.btn_scheduled_walks), withText("Scheduled and Proposed Walks"), isDisplayed()));
appCompatButton1.perform(click());
ViewInteraction appCompatButton2 = onView(
allOf(withId(R.id.schedule_propose_btn_schedule), withText("SCHEDULE"), isDisplayed()));
appCompatButton2.perform(click());
pressBack();
ViewInteraction appCompatButton3 = onView(
allOf(withId(R.id.btn_scheduled_walks), withText("Scheduled and Proposed Walks"), isDisplayed()));
appCompatButton3.perform(click());
onView(withId(R.id.schedule_propose_tv_walk_date)).check(matches(withEffectiveVisibility(ViewMatchers.Visibility.VISIBLE)));
onView(withId(R.id.schedule_propose_linear_layout_status_buttons)).check(matches(withEffectiveVisibility(ViewMatchers.Visibility.GONE)));
onView(withId(R.id.list_members_with_status)).check(matches(withEffectiveVisibility(ViewMatchers.Visibility.VISIBLE)));
}
}
<file_sep>/app/src/main/java/com/cse110team24/walkwalkrevolution/activities/teams/TeamRoutesActivity.java
package com.cse110team24.walkwalkrevolution.activities.teams;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.util.Log;
import com.cse110team24.walkwalkrevolution.HomeActivity;
import com.cse110team24.walkwalkrevolution.R;
import com.cse110team24.walkwalkrevolution.activities.userroutes.RouteDetailsActivity;
import com.cse110team24.walkwalkrevolution.activities.userroutes.RouteRecyclerViewAdapter;
import com.cse110team24.walkwalkrevolution.application.FirebaseApplicationWWR;
import com.cse110team24.walkwalkrevolution.firebase.firestore.observers.teams.TeamsRoutesObserver;
import com.cse110team24.walkwalkrevolution.firebase.firestore.services.DatabaseService;
import com.cse110team24.walkwalkrevolution.firebase.firestore.services.TeamsDatabaseService;
import com.cse110team24.walkwalkrevolution.models.route.Route;
import com.cse110team24.walkwalkrevolution.models.user.FirebaseUserAdapter;
import com.cse110team24.walkwalkrevolution.models.user.IUser;
import com.cse110team24.walkwalkrevolution.utils.Utils;
import com.google.firebase.firestore.DocumentSnapshot;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* {@inheritDoc}
* Handles displaying the currently signed in user's teammates' routes.
* <ol>
* <li>Sets up RecyclerViewAdapter and gets first batch of team routes, ordered by teammate name</li>
* <li>Each time a batch is retrieved, requests the next batch from the database, starting after the
* last route queried, until no more routes remain</li>
* </ol>
*/
public class TeamRoutesActivity extends AppCompatActivity implements TeamsRoutesObserver {
private static final String TAG = "WWR_TeamRoutesActivity";
public static final int REQUEST_CODE = 5120;
private TeamsDatabaseService mTeamsDb;
private SharedPreferences mPreferences;
private DocumentSnapshot mLastRouteDocSnapshot;
private List<Route> mTeamRoutes = new ArrayList<>();
private RouteRecyclerViewAdapter adapter;
private RecyclerView mTeamRv;
private IUser mCurrentUser;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_team_routes);
mPreferences = getSharedPreferences(HomeActivity.APP_PREF, Context.MODE_PRIVATE);
setUpDatabase();
getCurrentUser();
getUIElements();
getTeamRoutes();
}
@Override
protected void onActivityResult(int requestCode, int resultCode, @Nullable Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (requestCode == RouteDetailsActivity.REQUEST_CODE && resultCode == Activity.RESULT_OK) {
Log.d(TAG, "onActivityResult: returning to team activity to record walk");
returnToTeamActivityForWalk(data);
}
}
private void returnToTeamActivityForWalk(Intent data) {
setResult(Activity.RESULT_OK, data);
finish();
}
private void setUpDatabase() {
mTeamsDb = (TeamsDatabaseService) FirebaseApplicationWWR.getDatabaseServiceFactory().createDatabaseService(DatabaseService.Service.TEAMS);
mTeamsDb.register(this);
}
private void getCurrentUser() {
String teamUid = Utils.getString(mPreferences, IUser.TEAM_UID_KEY);
String displayName = Utils.getString(mPreferences, IUser.USER_NAME_KEY);
String email = Utils.getString(mPreferences, IUser.EMAIL_KEY);
mCurrentUser = FirebaseUserAdapter.builder()
.addDisplayName(displayName)
.addEmail(email)
.addTeamUid(teamUid)
.build();
}
private void getTeamRoutes() {
mTeamsDb.getUserTeamRoutes(mCurrentUser.teamUid(), mCurrentUser.getDisplayName(), 10, mLastRouteDocSnapshot);
}
private void getUIElements() {
mTeamRv = findViewById(R.id.recycler_view_team_routes);
setScrollListener(mTeamRv);
adapter = new RouteRecyclerViewAdapter(this, mTeamRoutes, mPreferences);
mTeamRv.setAdapter(adapter);
mTeamRv.setLayoutManager(new LinearLayoutManager(this, LinearLayoutManager.VERTICAL, false));
}
private void setScrollListener(RecyclerView view) {
// view.addOnScrollListener(new RecyclerView.OnScrollListener() {
// @Override
// public void onScrollStateChanged(@NonNull RecyclerView recyclerView, int newState) {
// super.onScrollStateChanged(recyclerView, newState);
// }
//
// @Override
// public void onScrolled(@NonNull RecyclerView recyclerView, int dx, int dy) {
// super.onScrolled(recyclerView, dx, dy);
// if (dy > 100 && Utils.checkNotNull(mLastRouteDocSnapshot)) {
// getTeamRoutes();
// }
// }
//
// });
}
@Override
public void onRoutesRetrieved(List<Route> routes, DocumentSnapshot lastRoute) {
mTeamRoutes.addAll(routes);
Collections.sort(mTeamRoutes);
mLastRouteDocSnapshot = lastRoute;
adapter.notifyDataSetChanged();
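// if the database returned a cursor to the last route, there may be more to fetch;
// keep requesting the next batch until lastRoute comes back null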
if (Utils.checkNotNull(lastRoute)) {
getTeamRoutes();
}
}
}
<file_sep>/app/src/androidTest/java/com/cse110team24/walkwalkrevolution/mockedservices/TestDatabaseServiceFactory.java
package com.cse110team24.walkwalkrevolution.mockedservices;
import com.cse110team24.walkwalkrevolution.firebase.firestore.services.DatabaseService;
import com.cse110team24.walkwalkrevolution.firebase.firestore.DatabaseServiceFactory;
/**
* Creates instances from one of 3 test implementations of {@link DatabaseService}.
*/
public class TestDatabaseServiceFactory implements DatabaseServiceFactory {
@Override
public DatabaseService createDatabaseService(DatabaseService.Service service) {
switch (service) {
case USERS:
return new TestUsersDatabaseService();
case TEAMS:
return new TestTeamsDatabaseService();
case INVITATIONS:
return new TestInvitationsDatabaseService();
default:
return null;
}
}
}
<file_sep>/app/src/main/java/com/cse110team24/walkwalkrevolution/firebase/messaging/FirebaseMessagingServiceWWR.java
package com.cse110team24.walkwalkrevolution.firebase.messaging;
import com.cse110team24.walkwalkrevolution.application.FirebaseApplicationWWR;
import com.google.firebase.messaging.FirebaseMessagingService;
import com.google.firebase.messaging.RemoteMessage;
public class FirebaseMessagingServiceWWR extends FirebaseMessagingService {
@Override
public void onMessageReceived(RemoteMessage remoteMessage) {
super.onMessageReceived(remoteMessage);
}
@Override
public void onNewToken(String token) {
FirebaseApplicationWWR applicationWWR = (FirebaseApplicationWWR) getApplication();
applicationWWR.notifyObserversNewToken(token);
}
}
<file_sep>/app/src/main/java/com/cse110team24/walkwalkrevolution/utils/Subject.java
package com.cse110team24.walkwalkrevolution.utils;
/**
* Generic subject in the observer pattern. Implementations keep track of their registered
* observers of type {@code TObserver} and notify them when relevant events occur.
*/
public interface Subject<TObserver> {
void register(TObserver observer);
void deregister(TObserver observer);
}
<file_sep>/milestone1_tests/LoginActivitySignUpEspressoTest.java
package com.cse110team24.walkwalkrevolution;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewParent;
import androidx.test.espresso.ViewInteraction;
import androidx.test.filters.LargeTest;
import androidx.test.platform.app.InstrumentationRegistry;
import androidx.test.runner.AndroidJUnit4;
import com.cse110team24.walkwalkrevolution.application.FirebaseApplicationWWR;
import com.cse110team24.walkwalkrevolution.firebase.auth.AuthService;
import com.cse110team24.walkwalkrevolution.firebase.auth.AuthServiceFactory;
import com.cse110team24.walkwalkrevolution.firebase.auth.FirebaseAuthServiceFactory;
import com.cse110team24.walkwalkrevolution.firebase.firestore.DatabaseService;
import com.cse110team24.walkwalkrevolution.firebase.firestore.DatabaseServiceFactory;
import com.cse110team24.walkwalkrevolution.firebase.firestore.FirestoreDatabaseServiceFactory;
import com.cse110team24.walkwalkrevolution.firebase.messaging.MessagingService;
import com.cse110team24.walkwalkrevolution.firebase.messaging.FirebaseMessagingServiceFactory;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hamcrest.TypeSafeMatcher;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import static androidx.test.espresso.Espresso.onView;
import static androidx.test.espresso.action.ViewActions.click;
import static androidx.test.espresso.action.ViewActions.closeSoftKeyboard;
import static androidx.test.espresso.action.ViewActions.replaceText;
import static androidx.test.espresso.assertion.ViewAssertions.matches;
import static androidx.test.espresso.matcher.ViewMatchers.isDisplayed;
import static androidx.test.espresso.matcher.ViewMatchers.withId;
import static androidx.test.espresso.matcher.ViewMatchers.withText;
import static org.hamcrest.Matchers.allOf;
@LargeTest
@RunWith(AndroidJUnit4.class)
public class LoginActivitySignUpEspressoTest {
@Rule
public MockActivityTestRule<LoginActivity> mActivityTestRule = new MockActivityTestRule<>(LoginActivity.class);
@Mock
AuthServiceFactory asf;
DatabaseServiceFactory dsf;
@Before
public void setup() {
// mActivityTestRule.nextSignIn = true;
// mActivityTestRule.nextSuccessStatus = true;
// mActivityTestRule.asf = Mockito.mock(FirebaseAuthServiceFactory.class);
// mActivityTestRule.dsf = Mockito.mock(FirestoreDatabaseServiceFactory.class);
// mActivityTestRule.msf = Mockito.mock(FirebaseMessagingServiceFactory.class);
// mActivityTestRule.mDb = Mockito.mock(DatabaseService.class);
// mActivityTestRule.mMsg = Mockito.mock(MessagingService.class);
//
// // TODO: 3/2/20 tell Mockito what to return when these methods get called. Should return custom, test implementations of the service interfaces
// Mockito.when(mActivityTestRule.asf.createAuthService()).thenReturn(mActivityTestRule.mAuth);
// Mockito.when(mActivityTestRule.dsf.createDatabaseService()).thenReturn(mActivityTestRule.mDb);
// Mockito.when(mActivityTestRule.msf.createMessagingService(mActivityTestRule.getActivity(), mActivityTestRule.mDb)).thenReturn(mActivityTestRule.mMsg);
//
// FirebaseApplicationWWR.setDatabaseServiceFactory(mActivityTestRule.dsf);
// FirebaseApplicationWWR.setAuthServiceFactory(mActivityTestRule.asf);
// FirebaseApplicationWWR.setMessagingServiceFactory(mActivityTestRule.msf);
}
@Test
public void loginActivitySignUpEspressoTest() {
setup();
FirebaseApplicationWWR.setAuthServiceFactory(asf);
FirebaseApplicationWWR.setDatabaseServiceFactory(dsf);
ViewInteraction button = onView(
allOf(withId(R.id.no_login_btn), isDisplayed()));
button.check(matches(isDisplayed()));
ViewInteraction button2 = onView(
allOf(withId(R.id.btn_height_finish), isDisplayed()));
button2.check(matches(isDisplayed()));
ViewInteraction textView = onView(
allOf(withId(R.id.sign_up_tv), withText("Don't have an account? Sign up here"), isDisplayed()));
textView.check(matches(withText("Don't have an account? Sign up here")));
ViewInteraction appCompatTextView = onView(
allOf(withId(R.id.sign_up_tv), withText("Don't have an account? Sign up here"),
childAtPosition(
childAtPosition(
withId(android.R.id.content),
0),
10),
isDisplayed()));
appCompatTextView.perform(click());
ViewInteraction appCompatEditText = onView(
allOf(withId(R.id.enter_gmail_address),
childAtPosition(
childAtPosition(
withId(android.R.id.content),
0),
1),
isDisplayed()));
appCompatEditText.perform(replaceText("<EMAIL>"), closeSoftKeyboard());
ViewInteraction appCompatEditText2 = onView(
allOf(withId(R.id.enter_password),
childAtPosition(
childAtPosition(
withId(android.R.id.content),
0),
2),
isDisplayed()));
appCompatEditText2.perform(replaceText("testpw"), closeSoftKeyboard());
ViewInteraction appCompatEditText3 = onView(
allOf(withId(R.id.enter_username),
childAtPosition(
childAtPosition(
withId(android.R.id.content),
0),
3),
isDisplayed()));
appCompatEditText3.perform(replaceText("test"), closeSoftKeyboard());
ViewInteraction appCompatEditText4 = onView(
allOf(withId(R.id.et_height_feet),
childAtPosition(
childAtPosition(
withId(android.R.id.content),
0),
5),
isDisplayed()));
appCompatEditText4.perform(replaceText("5"), closeSoftKeyboard());
ViewInteraction appCompatEditText5 = onView(
allOf(withId(R.id.et_height_remainder_inches),
childAtPosition(
childAtPosition(
withId(android.R.id.content),
0),
7),
isDisplayed()));
appCompatEditText5.perform(replaceText("3"), closeSoftKeyboard());
ViewInteraction appCompatButton = onView(
allOf(withId(R.id.btn_height_finish), withText("Sign Up"),
childAtPosition(
childAtPosition(
withId(android.R.id.content),
0),
9),
isDisplayed()));
appCompatButton.perform(click());
ViewInteraction textView2 = onView(
allOf(withText("WalkWalkRevolution"), isDisplayed()));
textView2.check(matches(withText("WalkWalkRevolution")));
}
private static Matcher<View> childAtPosition(
final Matcher<View> parentMatcher, final int position) {
return new TypeSafeMatcher<View>() {
@Override
public void describeTo(Description description) {
description.appendText("Child at position " + position + " in parent ");
parentMatcher.describeTo(description);
}
@Override
public boolean matchesSafely(View view) {
ViewParent parent = view.getParent();
return parent instanceof ViewGroup && parentMatcher.matches(parent)
&& view.equals(((ViewGroup) parent).getChildAt(position));
}
};
}
}
<file_sep>/app/src/main/java/com/cse110team24/walkwalkrevolution/firebase/firestore/DatabaseServiceFactory.java
package com.cse110team24.walkwalkrevolution.firebase.firestore;
import com.cse110team24.walkwalkrevolution.firebase.firestore.services.DatabaseService;
public interface DatabaseServiceFactory {
DatabaseService createDatabaseService(DatabaseService.Service service);
}
<file_sep>/app/src/test/java/com/cse110team24/walkwalkrevolution/MockActivityUnitTest.java
package com.cse110team24.walkwalkrevolution;
import android.content.Intent;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
import androidx.test.core.app.ActivityScenario;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import static junit.framework.TestCase.assertEquals;
import static junit.framework.TestCase.assertFalse;
import static junit.framework.TestCase.assertTrue;
@RunWith(AndroidJUnit4.class)
public class MockActivityUnitTest {
private static final String ENTER_START_TIME_PROMPT_TXT = "Enter Desired Start Time:";
private static final String ENTER_END_TIME_PROMPT_TXT = "Enter Desired End Time:";
private TextView addedStepsNum;
private TextView enterTimePrompt;
private Button finishBtn;
private Button addStepsBtn;
private EditText editTime;
@Before
public void setup() {
Intent intent = new Intent(ApplicationProvider.getApplicationContext(), MockActivity.class);
intent.putExtra(MockActivity.START_WALK_BTN_VISIBILITY_KEY, View.VISIBLE);
ActivityScenario<MockActivity> scenario = ActivityScenario.launch(intent);
scenario.onActivity(activity -> {
getUIFields(activity);
});
}
@Test
public void testMockScreenBeforeStartingWalk() {
assertEquals(addedStepsNum.getText().toString(), "0");
assertEquals(enterTimePrompt.getText().toString(), ENTER_START_TIME_PROMPT_TXT);
assertFalse(finishBtn.isEnabled());
}
@Test
public void testMockScreenAfterStartingWalk() {
Intent intent = new Intent(ApplicationProvider.getApplicationContext(), MockActivity.class);
intent.putExtra(MockActivity.START_WALK_BTN_VISIBILITY_KEY, View.INVISIBLE);
ActivityScenario<MockActivity> endScenario = ActivityScenario.launch(intent);
endScenario.onActivity(activity -> {
getUIFields(activity);
assertEquals(enterTimePrompt.getText().toString(), ENTER_END_TIME_PROMPT_TXT);
});
}
@Test
public void testAddOneIncrement() {
addStepsBtn.performClick();
assertEquals(addedStepsNum.getText().toString(), "500");
}
@Test
public void testAddMultipleIncrements() {
for(int i = 0; i < 4; i++) {
addStepsBtn.performClick();
}
assertEquals(addedStepsNum.getText().toString(), "2000");
}
@Test
public void testInvalidFormatTime() {
editTime.setText("12345678");
assertEquals(editTime.getText().toString(), "12345678");
assertFalse(finishBtn.isEnabled());
}
@Test
public void testValidFormatInvalidTime() {
editTime.setText("12:45:78");
assertEquals(editTime.getText().toString(), "12:45:78");
assertFalse(finishBtn.isEnabled());
}
@Test
public void testValidTime() {
editTime.setText("16:42:23");
assertEquals(editTime.getText().toString(), "16:42:23");
assertTrue(finishBtn.isEnabled());
}
private void getUIFields(MockActivity activity) {
addedStepsNum = activity.findViewById(R.id.tv_added_steps);
enterTimePrompt = activity.findViewById(R.id.tv_enter_time);
finishBtn = activity.findViewById(R.id.btn_mock_finish);
addStepsBtn = activity.findViewById(R.id.btn_increment_steps);
editTime = activity.findViewById(R.id.et_edit_time);
}
}
<file_sep>/app/src/main/java/com/cse110team24/walkwalkrevolution/firebase/firestore/adapters/FirebaseFirestoreAdapterUsers.java
package com.cse110team24.walkwalkrevolution.firebase.firestore.adapters;
import android.util.Log;
import com.cse110team24.walkwalkrevolution.firebase.firestore.observers.users.UsersDatabaseServiceObserver;
import com.cse110team24.walkwalkrevolution.firebase.firestore.observers.users.UsersUserDataObserver;
import com.cse110team24.walkwalkrevolution.firebase.firestore.observers.users.UsersUserExistsObserver;
import com.cse110team24.walkwalkrevolution.firebase.firestore.services.UsersDatabaseService;
import com.cse110team24.walkwalkrevolution.models.user.FirebaseUserAdapter;
import com.cse110team24.walkwalkrevolution.models.user.IUser;
import com.google.firebase.firestore.CollectionReference;
import com.google.firebase.firestore.DocumentReference;
import com.google.firebase.firestore.DocumentSnapshot;
import com.google.firebase.firestore.FirebaseFirestore;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import static com.cse110team24.walkwalkrevolution.models.user.FirebaseUserAdapter.TEAM_UID_KEY;
/**
* {@inheritDoc}
* This type's database provider is Cloud Firestore. The document path for a user is
* users/\{user\}.
*/
public class FirebaseFirestoreAdapterUsers implements UsersDatabaseService {
private static final String TAG = "WWR_FirebaseFirestoreAdapterUsers";
public static final String USERS_COLLECTION_KEY = "users";
public static final String USER_REGISTRATION_TOKENS_COLLECTION_KEY = "tokens";
public static final String TOKEN_SET_KEY = "token";
private CollectionReference usersCollection;
private FirebaseFirestore firebaseFirestore;
List<UsersDatabaseServiceObserver> observers = new ArrayList<>();
public FirebaseFirestoreAdapterUsers() {
firebaseFirestore = FirebaseFirestore.getInstance();
usersCollection = firebaseFirestore.collection(USERS_COLLECTION_KEY);
}
@Override
public void createUserInDatabase(IUser user) {
Map<String, Object> userData = user.userData();
DocumentReference userDocument = usersCollection.document(user.documentKey());
userDocument.set(userData).addOnCompleteListener(task -> {
if (task.isSuccessful()) {
Log.i(TAG, "createUserInDatabase: successfully created document in \"users\" collection for user " + user.getDisplayName());
} else {
Log.e(TAG, "createUserInDatabase: failed to create document", task.getException());
}
});
}
@Override
public void updateUserTeamUidInDatabase(IUser user, String teamUid) {
DocumentReference documentReference = usersCollection.document(user.documentKey());
documentReference.update(TEAM_UID_KEY, teamUid).addOnCompleteListener(task -> {
if (task.isSuccessful()) {
Log.i(TAG, "updateUserTeam: successfully updated user's team uid");
} else {
Log.e(TAG, "updateUserTeam: error updating team uid", task.getException());
}
});
}
@Override
public void getUserData(IUser user) {
DocumentReference documentReference = usersCollection.document(user.documentKey());
documentReference.get().addOnCompleteListener(task -> {
if (task.isSuccessful() && task.getResult() != null) {
notifyObserversUserData(task.getResult().getData());
}
});
}
@Override
public void notifyObserversUserData(Map<String, Object> userDataMap) {
observers.forEach(observer -> {
if(observer instanceof UsersUserDataObserver) {
((UsersUserDataObserver) observer).onUserData(userDataMap);
}
});
}
@Override
public void checkIfOtherUserExists(String userDocumentKey) {
DocumentReference otherUserDoc = usersCollection.document(userDocumentKey);
otherUserDoc.get().addOnCompleteListener(task -> {
if (task.isSuccessful() && task.getResult() != null) {
notifyObserversIfUserExists(task.getResult().exists(), buildUser(task.getResult()));
}
});
}
// builds a user containing only the display name and teamUid
private IUser buildUser(DocumentSnapshot data) {
if (data.exists()) {
return FirebaseUserAdapter.builder()
.addDisplayName(data.getString("displayName"))
.addTeamUid(data.getString("teamUid"))
.build();
} else {
return null;
}
}
@Override
public void notifyObserversIfUserExists(boolean exists, IUser otherUser) {
observers.forEach(observer -> {
if (observer instanceof UsersUserExistsObserver) {
UsersUserExistsObserver userExistsObserver = (UsersUserExistsObserver) observer;
if (exists) {
userExistsObserver.onUserExists(otherUser);
} else {
userExistsObserver.onUserDoesNotExist();
}
}
});
}
@Override
public void register(UsersDatabaseServiceObserver usersDatabaseServiceObserver) {
observers.add(usersDatabaseServiceObserver);
}
@Override
public void deregister(UsersDatabaseServiceObserver usersDatabaseServiceObserver) {
observers.remove(usersDatabaseServiceObserver);
}
}
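// Usage sketch (not part of the original source): the register-then-request call pattern this
// adapter expects. The anonymous observer assumes UsersUserExistsObserver declares exactly the
// two callbacks invoked above; "someDocumentKey" is a hypothetical value.
//
//     UsersDatabaseService users = new FirebaseFirestoreAdapterUsers();
//     users.register(new UsersUserExistsObserver() {
//         @Override
//         public void onUserExists(IUser otherUser) {
//             Log.i("Sketch", "found user: " + otherUser.getDisplayName());
//         }
//         @Override
//         public void onUserDoesNotExist() {
//             Log.i("Sketch", "no such user");
//         }
//     });
//     users.checkIfOtherUserExists(someDocumentKey);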
<file_sep>/app/src/main/java/com/cse110team24/walkwalkrevolution/models/user/FirebaseUserAdapterBuilder.java
package com.cse110team24.walkwalkrevolution.models.user;
import com.google.firebase.auth.FirebaseUser;
public interface FirebaseUserAdapterBuilder extends UserBuilder {
UserBuilder addFirebaseUser(FirebaseUser firebaseUser);
}
<file_sep>/app/src/main/java/com/cse110team24/walkwalkrevolution/activities/userroutes/SaveRouteActivity.java
package com.cse110team24.walkwalkrevolution.activities.userroutes;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import com.cse110team24.walkwalkrevolution.HomeActivity;
import com.cse110team24.walkwalkrevolution.R;
import com.cse110team24.walkwalkrevolution.models.route.Route;
import com.cse110team24.walkwalkrevolution.models.route.RouteEnvironment;
import com.cse110team24.walkwalkrevolution.models.route.WalkStats;
import com.cse110team24.walkwalkrevolution.models.user.IUser;
import com.cse110team24.walkwalkrevolution.utils.Utils;
import androidx.appcompat.app.AppCompatActivity;
import android.util.Log;
import android.widget.Button;
import android.widget.EditText;
import android.widget.RadioGroup;
import android.widget.Toast;
public class SaveRouteActivity extends AppCompatActivity {
private static final String TAG = "WWR_SaveRouteActivity";
public static final String WALK_STATS_KEY = "walk_stats";
public static final String NEW_ROUTE_KEY = "new_route";
public static final int REQUEST_CODE = 7;
private Route route;
private RouteEnvironment env;
private WalkStats stats;
private EditText editTextTitle;
private EditText editTextLocation;
private EditText editTextNotes;
private RadioGroup routeTypeRdGroup;
private RadioGroup terrainTypeRdGroup;
private RadioGroup surfaceTypeRdGroup;
private RadioGroup landTypeRdGroup;
private RadioGroup difficultyTypeRdGroup;
private Button doneButton;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_save_route);
getUIFields();
env = new RouteEnvironment();
stats = (WalkStats) getIntent().getSerializableExtra(WALK_STATS_KEY);
doneButton.setOnClickListener(view -> {
String title = editTextTitle.getText().toString();
createRoute(title);
if(route == null) {
return;
}
Intent intent = new Intent()
.putExtra(NEW_ROUTE_KEY, route);
setResult(Activity.RESULT_OK, intent);
Log.i(TAG, "onCreate: new route created: " + route);
finish();
});
}
private void getUIFields() {
editTextTitle = findViewById(R.id.et_save_route_title);
editTextLocation = findViewById(R.id.et_save_route_location);
editTextNotes = findViewById(R.id.et_route_notes);
routeTypeRdGroup = findViewById(R.id.radiogroup_route_type);
terrainTypeRdGroup = findViewById(R.id.rd_group_terrain_type);
surfaceTypeRdGroup = findViewById(R.id.rd_group_surface_type);
landTypeRdGroup = findViewById(R.id.rd_group_land_type);
difficultyTypeRdGroup = findViewById(R.id.rd_group_difficulty);
doneButton = findViewById(R.id.btn_save_route);
}
private void createRoute(String title) {
if (title.isEmpty()) {
Toast.makeText(getApplicationContext(), "Title is required", Toast.LENGTH_SHORT).show();
route = null;
}
else {
String creatorName = Utils.getString(getSharedPreferences(HomeActivity.APP_PREF, Context.MODE_PRIVATE), IUser.USER_NAME_KEY);
createRouteEnv();
route = new Route.Builder(title)
.addStartingLocation(editTextLocation.getText().toString())
.addNotes(editTextNotes.getText().toString())
.addRouteEnvironment(env)
.addWalkStats(stats)
.addCreatorDisplayName(creatorName)
.build();
}
}
private void createRouteEnv() {
setRouteType();
setTerrainType();
setSurfaceType();
setTrailType();
setDifficulty();
}
private void setRouteType() {
switch(routeTypeRdGroup.getCheckedRadioButtonId()) {
case R.id.radio_btn_loop:
env.setRouteType(RouteEnvironment.RouteType.LOOP);
break;
case R.id.radio_btn_out_back:
env.setRouteType(RouteEnvironment.RouteType.OUT_AND_BACK);
}
}
private void setTerrainType() {
switch(terrainTypeRdGroup.getCheckedRadioButtonId()) {
case R.id.rd_btn_flat:
env.setTerrainType(RouteEnvironment.TerrainType.FLAT);
break;
case R.id.rd_btn_hilly:
env.setTerrainType(RouteEnvironment.TerrainType.HILLY);
}
}
private void setSurfaceType() {
switch(surfaceTypeRdGroup.getCheckedRadioButtonId()) {
case R.id.rd_btn_even:
env.setSurfaceType(RouteEnvironment.SurfaceType.EVEN);
break;
case R.id.rd_btn_uneven:
env.setSurfaceType(RouteEnvironment.SurfaceType.UNEVEN);
}
}
private void setTrailType() {
switch(landTypeRdGroup.getCheckedRadioButtonId()) {
case R.id.rd_btn_street:
env.setTrailType(RouteEnvironment.TrailType.STREETS);
break;
case R.id.rd_btn_trail:
env.setTrailType(RouteEnvironment.TrailType.TRAIL);
}
}
private void setDifficulty() {
switch(difficultyTypeRdGroup.getCheckedRadioButtonId()) {
case R.id.rd_btn_hard:
env.setDifficulty(RouteEnvironment.Difficulty.HARD);
break;
case R.id.rd_btn_moderate:
env.setDifficulty(RouteEnvironment.Difficulty.MODERATE);
break;
case R.id.rd_btn_easy:
env.setDifficulty(RouteEnvironment.Difficulty.EASY);
}
}
}
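// Usage sketch (not part of the original source): how a caller might launch this activity for a
// result and read back the saved route using the keys defined above. It assumes Route is
// Serializable (WalkStats is read that way above); "stats" is a hypothetical WalkStats instance.
//
//     Intent intent = new Intent(this, SaveRouteActivity.class)
//             .putExtra(SaveRouteActivity.WALK_STATS_KEY, stats);
//     startActivityForResult(intent, SaveRouteActivity.REQUEST_CODE);
//
//     // later, in onActivityResult(requestCode, resultCode, data):
//     if (requestCode == SaveRouteActivity.REQUEST_CODE && resultCode == Activity.RESULT_OK) {
//         Route newRoute = (Route) data.getSerializableExtra(SaveRouteActivity.NEW_ROUTE_KEY);
//     }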
<file_sep>/app/src/androidTest/java/com/cse110team24/walkwalkrevolution/FavoriteWalkOnTeamRoutesBDDEspressoTest.java
package com.cse110team24.walkwalkrevolution;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewParent;
import androidx.test.espresso.ViewInteraction;
import com.cse110team24.walkwalkrevolution.fitness.FitnessServiceFactory;
import com.cse110team24.walkwalkrevolution.mockedservices.MockActivityTestRule;
import com.cse110team24.walkwalkrevolution.mockedservices.TestAuth;
import com.cse110team24.walkwalkrevolution.mockedservices.TestFitnessService;
import com.cse110team24.walkwalkrevolution.mockedservices.TestTeamsDatabaseService;
import com.cse110team24.walkwalkrevolution.mockedservices.TestUsersDatabaseService;
import com.cse110team24.walkwalkrevolution.models.route.Route;
import com.cse110team24.walkwalkrevolution.models.team.TeamAdapter;
import com.cse110team24.walkwalkrevolution.models.user.FirebaseUserAdapter;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hamcrest.TypeSafeMatcher;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import java.util.ArrayList;
import java.util.HashMap;
import static androidx.test.espresso.Espresso.onView;
import static androidx.test.espresso.action.ViewActions.click;
import static androidx.test.espresso.action.ViewActions.closeSoftKeyboard;
import static androidx.test.espresso.action.ViewActions.replaceText;
import static androidx.test.espresso.contrib.RecyclerViewActions.actionOnItemAtPosition;
import static androidx.test.espresso.matcher.ViewMatchers.isDisplayed;
import static androidx.test.espresso.matcher.ViewMatchers.withContentDescription;
import static androidx.test.espresso.matcher.ViewMatchers.withId;
import static androidx.test.espresso.matcher.ViewMatchers.withText;
import static com.cse110team24.walkwalkrevolution.mockedservices.TestFitnessService.TEST_SERVICE_KEY;
import static org.hamcrest.Matchers.allOf;
/**
Given that I log in successfully,
and click "Team",
When I click "See Teammate Routes",
Then I will see our group's routes,
When I favorite one of them,
Then it will show a star indicating I like that route.
*/
public class FavoriteWalkOnTeamRoutesBDDEspressoTest {
@Rule
public MockActivityTestRule<LoginActivity> mActivityTestRule = new MockActivityTestRule<>(LoginActivity.class);
@Before
public void setup() {
FitnessServiceFactory.put(TEST_SERVICE_KEY, activity -> new TestFitnessService(activity));
mActivityTestRule.getActivity().setFitnessServiceKey(TEST_SERVICE_KEY);
TestAuth.isTestUserSignedIn = true;
TestAuth.successUserSignedIn = true;
TestUsersDatabaseService.testCurrentUserData = new HashMap<>();
TestUsersDatabaseService.testCurrentUserData.put("displayName", "Emulator");
TestUsersDatabaseService.testCurrentUserData.put("email", "<EMAIL>");
TestUsersDatabaseService.testCurrentUserData.put("teamUid", "666");
TestAuth.testAuthUser = FirebaseUserAdapter.builder()
.addDisplayName("Emulator")
.addEmail("<EMAIL>")
.addTeamUid("666")
.build();
TestTeamsDatabaseService.testTeamRoutes = new ArrayList<>();
TestTeamsDatabaseService.testTeamRoutes.add(new Route.Builder("Title").addCreatorDisplayName("Teammate One").addRouteUid("357").build());
TestTeamsDatabaseService.testTeam = new TeamAdapter(new ArrayList<>());
}
@Test
public void favoriteWalkOnTeamRoutes() {
ViewInteraction appCompatEditText = onView(
allOf(withId(R.id.enter_gmail_address), isDisplayed()));
appCompatEditText.perform(replaceText("<EMAIL>"), closeSoftKeyboard());
ViewInteraction appCompatEditText2 = onView(
allOf(withId(R.id.enter_password), isDisplayed()));
appCompatEditText2.perform(replaceText("1234jam"), closeSoftKeyboard());
ViewInteraction appCompatEditText4 = onView(
allOf(withId(R.id.et_height_feet), isDisplayed()));
appCompatEditText4.perform(replaceText("5"), closeSoftKeyboard());
ViewInteraction appCompatEditText5 = onView(
allOf(withId(R.id.et_height_remainder_inches), isDisplayed()));
appCompatEditText5.perform(replaceText("7"), closeSoftKeyboard());
ViewInteraction appCompatButton = onView(
allOf(withId(R.id.btn_height_finish), withText("Login"), isDisplayed()));
appCompatButton.perform(click());
ViewInteraction bottomNavigationItemView2 = onView(
allOf(withId(R.id.action_team), withContentDescription("Team"), isDisplayed()));
bottomNavigationItemView2.perform(click());
ViewInteraction appCompatButton5 = onView(
allOf(withId(R.id.btn_team_activity_see_teammate_routes), withText("See Teammate Routes"), isDisplayed()));
appCompatButton5.perform(click());
ViewInteraction appCompatButton6 = onView(
allOf(withId(R.id.btn_routes_favorite),
childAtPosition(
allOf(withId(R.id.routes_container),
childAtPosition(
withId(R.id.recycler_view_team_routes),
0)),
4),
isDisplayed()));
appCompatButton6.perform(click());
}
private static Matcher<View> childAtPosition(
final Matcher<View> parentMatcher, final int position) {
return new TypeSafeMatcher<View>() {
@Override
public void describeTo(Description description) {
description.appendText("Child at position " + position + " in parent ");
parentMatcher.describeTo(description);
}
@Override
public boolean matchesSafely(View view) {
ViewParent parent = view.getParent();
return parent instanceof ViewGroup && parentMatcher.matches(parent)
&& view.equals(((ViewGroup) parent).getChildAt(position));
}
};
}
}
<file_sep>/app/src/test/java/com/cse110team24/walkwalkrevolution/ScheduledWalkActivityUnitTest.java
package com.cse110team24.walkwalkrevolution;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.provider.ContactsContract;
import android.widget.Button;
import android.widget.TextView;
import androidx.test.core.app.ActivityScenario;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.cse110team24.walkwalkrevolution.activities.teams.ScheduledProposedWalkActivity;
import com.cse110team24.walkwalkrevolution.activities.teams.TeamActivity;
import com.cse110team24.walkwalkrevolution.activities.userroutes.SaveRouteActivity;
import com.cse110team24.walkwalkrevolution.firebase.firestore.observers.teams.TeamsDatabaseServiceObserver;
import com.cse110team24.walkwalkrevolution.firebase.firestore.observers.teams.TeamsTeamWalksObserver;
import com.cse110team24.walkwalkrevolution.firebase.firestore.services.DatabaseService;
import com.cse110team24.walkwalkrevolution.models.route.Route;
import com.cse110team24.walkwalkrevolution.models.team.walk.TeamWalk;
import com.cse110team24.walkwalkrevolution.models.user.IUser;
import com.google.firebase.Timestamp;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.robolectric.Shadows;
import org.robolectric.shadows.ShadowActivity;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import static junit.framework.TestCase.assertEquals;
import static org.junit.Assert.*;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
@RunWith(AndroidJUnit4.class)
public class ScheduledWalkActivityUnitTest extends TestInjection {
ActivityScenario<ScheduledProposedWalkActivity> scenario;
SharedPreferences sp;
TextView location_btn;
TeamsTeamWalksObserver observer;
List<TeamWalk> listWithLoc;
@Before
public void setup() {
super.setup();
sp = ApplicationProvider.getApplicationContext().getSharedPreferences(HomeActivity.APP_PREF, Context.MODE_PRIVATE);
sp.edit().putString(IUser.EMAIL_KEY, testUser.getEmail())
.putString(IUser.USER_NAME_KEY, testUser.getDisplayName())
.commit();
Mockito.when(dsf.createDatabaseService(DatabaseService.Service.USERS)).thenReturn(usersDatabaseService);
Mockito.when(dsf.createDatabaseService(DatabaseService.Service.TEAMS)).thenReturn(teamsDatabaseService);
Route routeWithLoc = new Route.Builder("route with location")
.addStartingLocation("Center Hall")
.build();
listWithLoc = new ArrayList<>();
SimpleDateFormat sdf = new SimpleDateFormat("MM/dd/yyyy hh:mm a");
Date date = null;
try {
date = sdf.parse("12/10/2099 12:00 PM");
} catch (ParseException e) {
e.printStackTrace();
}
Timestamp timestamp = new Timestamp(date);
TeamWalk teamWalk = new TeamWalk.Builder()
.addProposedBy("user")
.addProposedRoute(routeWithLoc)
.addProposedDateAndTime(timestamp)
.build();
listWithLoc.add(teamWalk);
}
private void getUIFields(Activity activity) {
location_btn = activity.findViewById(R.id.schedule_propose_tv_starting_loc_display);
}
private void registerObserver() {
Mockito.doAnswer(invocation -> {
observer = invocation.getArgument(0);
return invocation;
}).when(teamsDatabaseService).register(any());
}
@Test
public void launchGoogleMapsWithLocation() {
registerObserver();
Mockito.doAnswer(invocation -> {
observer.onTeamWalksRetrieved(listWithLoc);
return null;
}).when(teamsDatabaseService).getLatestTeamWalksDescendingOrder(any(), eq(1));
scenario = ActivityScenario.launch(ScheduledProposedWalkActivity.class);
scenario.onActivity(activity -> {
Mockito.verify(teamsDatabaseService).register(any());
Mockito.verify(teamsDatabaseService).getLatestTeamWalksDescendingOrder(any(), eq(1));
getUIFields(activity);
location_btn.performClick();
ShadowActivity shadowActivity = Shadows.shadowOf(activity);
Intent actualIntent = shadowActivity.getNextStartedActivity();
assertEquals(actualIntent.getAction(), Intent.ACTION_VIEW);
assertEquals(actualIntent.getData().toString(), "http://maps.google.co.in/maps?q=" + location_btn.getText().toString());
});
}
}
<file_sep>/app/src/main/java/com/cse110team24/walkwalkrevolution/firebase/auth/FirebaseAuthAdapter.java
package com.cse110team24.walkwalkrevolution.firebase.auth;
import android.util.Log;
import androidx.annotation.NonNull;
import com.cse110team24.walkwalkrevolution.models.user.IUser;
import com.cse110team24.walkwalkrevolution.models.user.FirebaseUserAdapter.Builder;
import com.google.android.gms.tasks.Task;
import com.google.firebase.FirebaseNetworkException;
import com.google.firebase.auth.AuthResult;
import com.google.firebase.auth.FirebaseAuth;
import com.google.firebase.auth.FirebaseAuthInvalidCredentialsException;
import com.google.firebase.auth.FirebaseAuthInvalidUserException;
import com.google.firebase.auth.FirebaseAuthUserCollisionException;
import com.google.firebase.auth.FirebaseUser;
import com.google.firebase.auth.UserProfileChangeRequest;
import java.util.ArrayList;
import java.util.List;
/**
* {@inheritDoc}
* The provider for this class is Firebase.
*/
public class FirebaseAuthAdapter implements Auth, FirebaseAuth.AuthStateListener {
private static final String TAG = "WWR_FirebaseAuthAdapter";
private FirebaseAuth mAuth;
private FirebaseUser mFirebaseUser;
private Builder mUserAdapterBuilder;
private AuthError mAuthError;
private boolean signUp;
private List<AuthObserver> observers;
public FirebaseAuthAdapter() {
mAuth = FirebaseAuth.getInstance();
mFirebaseUser = mAuth.getCurrentUser();
mUserAdapterBuilder = new Builder();
mUserAdapterBuilder.addFirebaseUser(mFirebaseUser);
mAuth.addAuthStateListener(this);
observers = new ArrayList<>();
}
@Override
public void signIn(String email, String password) {
Log.i(TAG, "signIn: beginning sign in process");
signUp = false;
mAuth.signInWithEmailAndPassword(email, password)
.addOnCompleteListener(task -> {
if (task.isSuccessful()) {
Log.i(TAG, "onComplete: user sign-in successful");
buildUserEssentials(email);
mUserAdapterBuilder.addDisplayName(mFirebaseUser.getDisplayName());
notifyObserversSignedIn(mUserAdapterBuilder.build());
} else {
Log.e(TAG, "signIn: user sign-in failed", task.getException());
detectErrorType(task);
notifyObserversSignInError(mAuthError);
}
});
}
@Override
public void signUp(String email, String password, String displayName) {
Log.i(TAG, "signUp: beginning sign up process");
signUp = true;
mAuth.createUserWithEmailAndPassword(email, password)
.addOnCompleteListener(task -> {
if (task.isSuccessful()) {
Log.i(TAG, "signUp: user creation successful");
buildUserEssentials(email);
setFirebaseUserDisplayName(displayName);
mUserAdapterBuilder.addDisplayName(displayName);
notifyObserversSignedUp(mUserAdapterBuilder.build());
} else {
Log.e(TAG, "signUp: user creation failed", task.getException());
detectErrorType(task);
notifyObserversSignUpError(mAuthError);
}
});
}
private void setFirebaseUserDisplayName(String displayName) {
UserProfileChangeRequest profileUpdates = new UserProfileChangeRequest.Builder()
.setDisplayName(displayName)
.build();
mFirebaseUser.updateProfile(profileUpdates)
.addOnCompleteListener(task -> {
if (task.isSuccessful()) {
Log.d(TAG, "User profile updated.");
}
});
}
@Override
public IUser getUser() {
return mUserAdapterBuilder.build();
}
@Override
public boolean isUserSignedIn() {
return mFirebaseUser != null;
}
@Override
public AuthError getAuthError() {
return mAuthError;
}
private void detectErrorType(Task<AuthResult> task) {
Exception exception = task.getException();
if (exception instanceof FirebaseAuthUserCollisionException) {
mAuthError = AuthError.USER_COLLISION;
} else if (exception instanceof FirebaseAuthInvalidUserException) {
mAuthError = AuthError.DOES_NOT_EXIST;
} else if (exception instanceof FirebaseAuthInvalidCredentialsException) {
mAuthError = AuthError.INVALID_PASSWORD;
} else if (exception instanceof FirebaseNetworkException){
mAuthError = AuthError.NETWORK_ERROR;
}
else {
mAuthError = AuthError.OTHER;
}
}
private void buildUserEssentials(String email) {
mUserAdapterBuilder.addEmail(email);
mFirebaseUser = mAuth.getCurrentUser();
mUserAdapterBuilder.addFirebaseUser(mFirebaseUser);
mUserAdapterBuilder.addUid(mFirebaseUser.getUid());
}
@Override
public void register(AuthObserver authObserver) {
observers.add(authObserver);
}
@Override
public void deregister(AuthObserver authObserver) {
observers.remove(authObserver);
}
@Override
public void onAuthStateChanged(@NonNull FirebaseAuth firebaseAuth) {
mFirebaseUser = firebaseAuth.getCurrentUser();
if (isUserSignedIn()) {
buildUserEssentials(mFirebaseUser.getEmail());
}
}
@Override
public void notifyObserversSignedIn(IUser user) {
observers.forEach(observer -> {
observer.onUserSignedIn(user);
});
}
@Override
public void notifyObserversSignedUp(IUser user) {
observers.forEach(observer -> {
observer.onUserSignedUp(user);
});
}
@Override
public void notifyObserversSignInError(AuthError error) {
observers.forEach(observer -> {
observer.onAuthSignInError(error);
});
}
@Override
public void notifyObserversSignUpError(AuthError error) {
observers.forEach(observer -> {
observer.onAuthSignUpError(error);
});
}
}
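// Usage sketch (not part of the original source): the observer-driven call pattern this adapter
// expects. Results arrive through AuthObserver callbacks rather than return values (LoginActivity
// plays the observer role in this project); constructing the adapter with "new" below is only an
// illustration, since the app may obtain it through a factory.
//
//     Auth auth = new FirebaseAuthAdapter();
//     auth.register(loginActivity);        // loginActivity implements AuthObserver
//     auth.signIn(email, password);        // -> onUserSignedIn(...) or onAuthSignInError(...)
//     // or:
//     auth.signUp(email, password, name);  // -> onUserSignedUp(...) or onAuthSignUpError(...)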
<file_sep>/app/src/main/java/com/cse110team24/walkwalkrevolution/firebase/firestore/adapters/FireBaseFireStoreAdapterTeams.java
package com.cse110team24.walkwalkrevolution.firebase.firestore.adapters;
import android.util.Log;
import com.cse110team24.walkwalkrevolution.firebase.firestore.observers.teams.TeamsDatabaseServiceObserver;
import com.cse110team24.walkwalkrevolution.firebase.firestore.observers.teams.TeamsRoutesObserver;
import com.cse110team24.walkwalkrevolution.firebase.firestore.observers.teams.TeamsTeamStatusesObserver;
import com.cse110team24.walkwalkrevolution.firebase.firestore.observers.teams.TeamsTeamWalksObserver;
import com.cse110team24.walkwalkrevolution.firebase.firestore.observers.teams.TeamsTeammatesObserver;
import com.cse110team24.walkwalkrevolution.firebase.firestore.services.TeamsDatabaseService;
import com.cse110team24.walkwalkrevolution.models.route.Route;
import com.cse110team24.walkwalkrevolution.models.route.RouteEnvironment;
import com.cse110team24.walkwalkrevolution.models.route.WalkStats;
import com.cse110team24.walkwalkrevolution.models.team.ITeam;
import com.cse110team24.walkwalkrevolution.models.team.TeamAdapter;
import com.cse110team24.walkwalkrevolution.models.team.walk.TeamWalk;
import com.cse110team24.walkwalkrevolution.models.team.walk.TeamWalkStatus;
import com.cse110team24.walkwalkrevolution.models.team.walk.TeammateStatus;
import com.cse110team24.walkwalkrevolution.models.user.FirebaseUserAdapter;
import com.cse110team24.walkwalkrevolution.models.user.IUser;
import com.cse110team24.walkwalkrevolution.utils.Utils;
import com.google.android.gms.tasks.Task;
import com.google.firebase.Timestamp;
import com.google.firebase.firestore.CollectionReference;
import com.google.firebase.firestore.DocumentReference;
import com.google.firebase.firestore.DocumentSnapshot;
import com.google.firebase.firestore.FirebaseFirestore;
import com.google.firebase.firestore.Query;
import com.google.firebase.firestore.QuerySnapshot;
import com.google.firebase.firestore.SetOptions;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
/**
* {@inheritDoc}
* This type's database provider is Cloud Firestore. The document path for a team is
* teams/\{team\}. The document path for a teammate is teams/\{team\}/teammates/\{teammate\}.
* The document path for a teammate route is teams/\{team\}/routes/\{route\}.
*/
public class FireBaseFireStoreAdapterTeams implements TeamsDatabaseService {
private static final String TAG = "WWR_FirebaseFirestoreAdapterTeams";
public static final String TEAMS_COLLECTION_KEY = "teams";
public static final String TEAMMATES_SUB_COLLECTION = "teammates";
public static final String TEAM_ROUTES_SUB_COLLECTION_KEY = "routes";
private CollectionReference mTeamsCollection;
public FireBaseFireStoreAdapterTeams() {
FirebaseFirestore firebaseFirestore = FirebaseFirestore.getInstance();
mTeamsCollection = firebaseFirestore.collection(TEAMS_COLLECTION_KEY);
}
@Override
public String createTeamInDatabase(IUser user) {
Log.d(TAG, "createTeamInDatabase: creating team");
// create a new team document; its uid is returned so the caller can update the user's teamUid
DocumentReference teamDocument = mTeamsCollection.document();
String teamUid = teamDocument.getId();
// create the teammates collection and the individual member document
CollectionReference teamSubCollection = teamDocument.collection(TEAMMATES_SUB_COLLECTION);
DocumentReference memberDocument = teamSubCollection.document(user.documentKey());
memberDocument.set(user.userData()).addOnCompleteListener(task -> {
if (task.isSuccessful()) {
Log.i(TAG, "createTeamInDatabase: successfully created team document");
} else {
Log.e(TAG, "createTeamInDatabase: error creating team document", task.getException());
}
});
return teamUid;
}
@Override
public void addUserToTeam(IUser user, String teamUid) {
// teamsCollection/teamDocument/teammatesCollection/userDocument
DocumentReference teamDocument = mTeamsCollection.document(teamUid);
CollectionReference teammatesCollection = teamDocument.collection(TEAMMATES_SUB_COLLECTION);
teammatesCollection.document(user.documentKey()).set(user.userData()).addOnCompleteListener(task -> {
if (task.isSuccessful()) {
Log.i(TAG, "addUserToTeam: successfully updated team");
} else {
Log.e(TAG, "addUserToTeam: error updating team", task.getException());
}
});
}
@Override
public void getUserTeam(String teamUid, String currentUserDisplayName) {
DocumentReference documentReference = mTeamsCollection.document(teamUid);
CollectionReference teammatesCollection = documentReference.collection(TEAMMATES_SUB_COLLECTION);
teammatesCollection.orderBy("displayName").get().addOnCompleteListener(task -> {
if (task.isSuccessful() && task.getResult() != null) {
Log.i(TAG, "getUserTeam: team successfully retrieved");
List<DocumentSnapshot> documents = task.getResult().getDocuments();
ITeam team = getTeamList(documents, currentUserDisplayName);
notifyObserversTeamRetrieved(team);
} else {
Log.e(TAG, "getUserTeam: error getting user team", task.getException());
}
});
}
private ITeam getTeamList(List<DocumentSnapshot> documents, String currentUserDisplayName) {
ITeam team = new TeamAdapter(new ArrayList<>());
for (DocumentSnapshot member : documents) {
String displayName = (String) member.get("displayName");
// skip the current user
if (currentUserDisplayName.equals(displayName)) continue;
IUser user = FirebaseUserAdapter.builder()
.addDisplayName(displayName)
.addLatestWalkStatus(TeammateStatus.get(member.getString("status")))
.build();
team.addMember(user);
}
return team;
}
@Override
public void getUserTeamRoutes(String teamUid, String currentUserDisplayName, int routeLimitCount, DocumentSnapshot lastRoute) {
Log.d(TAG, "getUserTeamRoutes: teamUid " + teamUid + " currentDisplayName " + currentUserDisplayName);
// return routes ordered by creator name, skipping routes created by the current user
Query routesQuery = getRoutesQuery(teamUid, currentUserDisplayName, routeLimitCount, lastRoute, 0);
routesQuery.get().addOnCompleteListener(task -> {
if (task.isSuccessful() && task.getResult() != null) {
QuerySnapshot resultDocs = task.getResult();
Log.i(TAG, "getUserTeamRoutes: " + resultDocs.size() + " routes retrieved with names < current user");
List<Route> routes = getRoutes(resultDocs);
// try to grab more routes if not enough grabbed
if (resultDocs.size() < routeLimitCount) {
getUserTeamRoutesGreaterThan(routes, teamUid, currentUserDisplayName, routeLimitCount - resultDocs.size(), lastRoute);
return;
}
getLastVisibleDoc(resultDocs, routes);
} else {
Log.e(TAG, "getUserTeamRoutes: could not retrieve team routes", task.getException());
}
});
}
private void getLastVisibleDoc(QuerySnapshot resultDocs, List<Route> routes) {
DocumentSnapshot lastVisible = null;
if (resultDocs.size() > 0) {
lastVisible = resultDocs.getDocuments().get(resultDocs.size() - 1);
}
notifyObserversTeamRoutesRetrieved(routes, lastVisible);
}
private List<Route> getRoutes(QuerySnapshot resultDocs) {
List<Route> routes = new ArrayList<>(resultDocs.size());
resultDocs.getDocuments().forEach(documentSnapshot -> routes.add(buildRoute(documentSnapshot)));
return routes;
}
// build the query ordered by creator name (createdBy), limited by routeLimitCount; skips the current user via the < and > clauses
private Query getRoutesQuery(String teamUid, String currentUserDisplayName, int routeLimitCount, DocumentSnapshot lastRoute, int order) {
Query routesQuery = mTeamsCollection
.document(teamUid)
.collection(TEAM_ROUTES_SUB_COLLECTION_KEY);
if (order == 0) {
routesQuery = routesQuery.whereLessThan("createdBy", currentUserDisplayName);
} else {
routesQuery = routesQuery.whereGreaterThan("createdBy", currentUserDisplayName);
}
routesQuery = routesQuery.orderBy("createdBy").limit(routeLimitCount);
if (lastRoute != null) {
routesQuery = routesQuery.startAfter(lastRoute);
}
return routesQuery;
}
// get the team routes for users with names greater than currentUserDisplayName
private void getUserTeamRoutesGreaterThan(List<Route> routes, String teamUid, String currentUserDisplayName, int routeLimitCount, DocumentSnapshot lastRoute) {
Query routesQuery = getRoutesQuery(teamUid, currentUserDisplayName, routeLimitCount, lastRoute, 1);
routesQuery.get().addOnCompleteListener(task -> {
if (task.isSuccessful() && task.getResult() != null) {
QuerySnapshot resultDocs = task.getResult();
Log.i(TAG, "getUserTeamRoutes: " + resultDocs.size() + " routes retrieved with names > current user");
routes.addAll(getRoutes(resultDocs));
getLastVisibleDoc(resultDocs, routes);
} else {
Log.e(TAG, "getUserTeamRoutes: could not retrieve team routes", task.getException());
}
});
}
private Route buildRoute(DocumentSnapshot routeDoc) {
WalkStats stats = null;
Object data = routeDoc.get("stats");
if (data != null) {
stats = buildWalkStats((Map<String, Object>) data);
}
return new Route.Builder(routeDoc.getString("title"))
.addCreatorDisplayName(routeDoc.getString("createdBy"))
.addWalkStats(stats)
.addRouteEnvironment(buildRouteEnvironment((Map<String, Object>) routeDoc.get("environment")))
.addNotes(routeDoc.getString("notes"))
.addStartingLocation(routeDoc.getString("startingLocation"))
.addRouteUid(routeDoc.getId())
.build();
}
private WalkStats buildWalkStats(Map<String, Object> data) {
Long steps = Utils.getValueOrNull("steps", data);
Double distance = Utils.getValueOrNull("distance", data);
Long timeElapsed = Utils.getValueOrNull("elapsedTimeMillis", data);
Timestamp time = Utils.getValueOrNull("date", data);
Calendar calendarInstance = Calendar.getInstance();
calendarInstance.setTime(time.toDate());
return new WalkStats(steps, timeElapsed, distance, calendarInstance);
}
private RouteEnvironment buildRouteEnvironment(Map<String, Object> data) {
RouteEnvironment.Difficulty difficulty = data.get("difficulty") == null ? null : RouteEnvironment.Difficulty.valueOf((String) data.get("difficulty"));
RouteEnvironment.RouteType routeType = data.get("routeType") == null ? null : RouteEnvironment.RouteType.valueOf((String) data.get("routeType"));
RouteEnvironment.SurfaceType surfaceType = data.get("surfaceType") == null ? null : RouteEnvironment.SurfaceType.valueOf((String) data.get("surfaceType"));
RouteEnvironment.TerrainType terrainType = data.get("terrainType") == null ? null : RouteEnvironment.TerrainType.valueOf((String) data.get("terrainType"));
RouteEnvironment.TrailType trailType = data.get("trailType") == null ? null : RouteEnvironment.TrailType.valueOf((String) data.get("trailType"));
return RouteEnvironment.builder()
.addDifficulty(difficulty)
.addRouteType(routeType)
.addSurfaceType(surfaceType)
.addTerrainType(terrainType)
.addTrailType(trailType)
.build();
}
@Override
public void uploadRoute(String teamUid, Route route) {
// upload to teams/{team}/routes/{route}
DocumentReference routeDoc = mTeamsCollection
.document(teamUid)
.collection(TEAM_ROUTES_SUB_COLLECTION_KEY)
.document();
route.setRouteUid(routeDoc.getId());
routeDoc.set(route.routeData()).addOnCompleteListener(task -> {
if (task.isSuccessful()) {
Log.i(TAG, "uploadRoute: route uploaded successfully");
} else {
Log.e(TAG, "uploadRoute: route failed to upload", task.getException());
}
});
}
@Override
public void updateRoute(String teamUid, Route route) {
// update in teams/{team}/routes/{route}
DocumentReference routeDoc = mTeamsCollection
.document(teamUid)
.collection(TEAM_ROUTES_SUB_COLLECTION_KEY)
.document(route.getRouteUid());
routeDoc.set(route.routeData()).addOnCompleteListener(task -> {
if (task.isSuccessful()) {
Log.i(TAG, "uploadRoute: success updated route " + route);
} else {
Log.e(TAG, "uploadRoute: error updating route.", task.getException());
}
});
}
// if the walk does not yet exist (no uid), creates it instead of updating
@Override
public String updateCurrentTeamWalk(TeamWalk teamWalk) {
// update in teams/{team}/teamWalks/{teamWalk}
if (Utils.checkNotNull(teamWalk.getWalkUid())) {
mTeamsCollection.document(teamWalk.getTeamUid())
.collection("teamWalks")
.document(teamWalk.getWalkUid())
.update(teamWalk.dataInMapForm())
.addOnCompleteListener(task -> {
if (task.isSuccessful()) {
Log.i(TAG, "updateCurrentTeamWalk: Success updating team walk");
} else {
Log.e(TAG, "updateCurrentTeamWalk: error updating team walk, will attempt creation", task.getException());
// try creating it if failed
}
});
return teamWalk.getWalkUid();
} else {
return tryToCreateTeamWalkDoc(teamWalk);
}
}
private String tryToCreateTeamWalkDoc(TeamWalk teamWalk) {
DocumentReference docRef = mTeamsCollection.document(teamWalk.getTeamUid())
.collection("teamWalks")
.document();
docRef.set(teamWalk.dataInMapForm()).addOnCompleteListener(task -> {
if (task.isSuccessful()) {
Log.i(TAG, "tryToCreateTeamWalkDoc: team walk document created");
} else {
Log.e(TAG, "tryToCreateTeamWalkDoc: error creating team walk document", task.getException());
}
});
return docRef.getId();
}
private TeamWalk buildTeamWalk(DocumentSnapshot documentSnapshot) {
Route proposedRoute = new Route.Builder("").addFieldsFromMap((Map<String, Object>) documentSnapshot.get("proposedRoute")).build();
TeamWalk teamWalk = TeamWalk.builder()
.addTeamUid(documentSnapshot.getString("teamUid"))
.addProposedBy(documentSnapshot.getString("proposedBy"))
.addProposedDateAndTime(documentSnapshot.getTimestamp("proposedDateAndTime"))
.addProposedRoute(proposedRoute)
.addWalkUid(documentSnapshot.getId())
.addStatus(TeamWalkStatus.valueOf(documentSnapshot.getString("status")))
.build();
return teamWalk;
}
@Override
public void getLatestTeamWalksDescendingOrder(String teamUid, int teamWalkLimitCt) {
Query query = mTeamsCollection.document(teamUid).collection("teamWalks")
.orderBy("proposedOn", Query.Direction.DESCENDING)
.limit(teamWalkLimitCt);
query.get().addOnCompleteListener(task -> {
List<TeamWalk> teamWalks = new ArrayList<>();
if (task.isSuccessful() && Utils.checkNotNull(task.getResult())) {
Log.i(TAG, "getLatestTeamWalksDescendingOrder: success getting team walks");
QuerySnapshot result = task.getResult();
result.getDocuments().forEach(documentSnapshot -> {
teamWalks.add(buildTeamWalk(documentSnapshot));
});
} else {
Log.e(TAG, "getLatestTeamWalksDescendingOrder: error getting team walks", task.getException());
}
notifyObserversTeamWalksRetrieved(teamWalks);
});
}
@Override
public void changeTeammateStatusForLatestWalk(IUser user, TeamWalk teamWalk, TeammateStatus changedStatus) {
Map<String, Object> data = new HashMap<>();
data.put("displayName", user.getDisplayName());
data.put("status", changedStatus.getReason());
// teams/{team}/teamWalks/{teamWalk}/teammateStatuses/{teammate}
DocumentReference teammateStatusDocument = mTeamsCollection
.document(user.teamUid())
.collection("teamWalks")
.document(teamWalk.getWalkUid())
.collection("teammateStatuses")
.document(user.documentKey());
teammateStatusDocument
.update(data).addOnCompleteListener(task -> {
if (task.isSuccessful()) {
Log.i(TAG, "changeTeammateStatus: success updating teammate status");
} else {
tryToSetTeammateStatus(data, teammateStatusDocument);
}
});
}
@Override
public void getTeammateStatusesForTeamWalk(TeamWalk teamWalk, String teamUid) {
// first get all teammate documents to know who the teammates are
mTeamsCollection.document(teamUid)
.collection("teammates")
.get().addOnCompleteListener(task -> {
SortedMap<String, String> data = new TreeMap<>();
if (task.isSuccessful() && task.getResult() != null) {
Log.d(TAG, "getTeammateStatusesForTeamWalk: success");
addToMap(task.getResult().getDocuments(), data, teamWalk, teamUid, 0);
} else {
Log.e(TAG, "getTeammateStatusesForTeamWalk: error", task.getException());
notifyObserversTeamWalkStatusesRetrieved(data);
}
});
}
private void addToMap(List<DocumentSnapshot> documentSnapshots, SortedMap<String, String> data, TeamWalk teamWalk, String teamUid, int position) {
Set<String> teammateNames = new HashSet<>();
documentSnapshots.forEach(documentSnapshot -> teammateNames.add(documentSnapshot.getString("displayName")));
// get all status documents
mTeamsCollection.document(teamUid)
.collection("teamWalks")
.document(teamWalk.getWalkUid())
.collection("teammateStatuses")
.get().addOnCompleteListener(task -> {
if (task.isSuccessful() && task.getResult() != null) {
Log.d(TAG, "addToMap: success");
// for each teammate with a status, add it to the data map; remove them from the set to track who is missing
task.getResult().getDocuments().forEach(documentSnapshot -> {
getUserNameAndStatus(data, documentSnapshot);
teammateNames.remove(documentSnapshot.getString("displayName"));
});
addPendingStatusForRemainingTeammates(teammateNames, data);
notifyObserversTeamWalkStatusesRetrieved(data);
} else {
Log.e(TAG, "addToMap: error", task.getException());
notifyObserversTeamWalkStatusesRetrieved(data);
}
});
}
private void getUserNameAndStatus(SortedMap<String, String> data, DocumentSnapshot documentSnapshot) {
String status = documentSnapshot.getString("status");
String displayName = documentSnapshot.getString("displayName");
data.put(displayName, status);
}
private void addPendingStatusForRemainingTeammates(Set<String> teammateNames, SortedMap<String, String> data) {
String status = TeammateStatus.PENDING.getReason();
teammateNames.forEach(teammateName -> data.put(teammateName, status));
}
private void tryToSetTeammateStatus(Map<String, Object> data, DocumentReference statusDocument) {
statusDocument
.set(data, SetOptions.merge())
.addOnCompleteListener(task -> {
if (task.isSuccessful()) {
Log.i(TAG, "tryToSetTeammateStatus: success setting teammate status for first time");
} else {
Log.e(TAG, "tryToSetTeammateStatus: error updating and setting teammate status", task.getException());
}
});
}
private List<TeamsDatabaseServiceObserver> observers = new ArrayList<>();
@Override
public void notifyObserversTeamRetrieved(ITeam team) {
observers.forEach(observer -> {
if (observer instanceof TeamsTeammatesObserver) {
((TeamsTeammatesObserver) observer).onTeamRetrieved(team);
}
});
}
@Override
public void notifyObserversTeamRoutesRetrieved(List<Route> routes, DocumentSnapshot lastRoute) {
observers.forEach(observer -> {
if (observer instanceof TeamsRoutesObserver) {
((TeamsRoutesObserver) observer).onRoutesRetrieved(routes, lastRoute);
}
});
}
@Override
public void notifyObserversTeamWalksRetrieved(List<TeamWalk> walks) {
observers.forEach(observer -> {
if (observer instanceof TeamsTeamWalksObserver) {
((TeamsTeamWalksObserver) observer).onTeamWalksRetrieved(walks);
}
});
}
@Override
public void notifyObserversTeamWalkStatusesRetrieved(SortedMap<String, String> statusData) {
observers.forEach(observer -> {
if (observer instanceof TeamsTeamStatusesObserver) {
((TeamsTeamStatusesObserver) observer).onTeamWalkStatusesRetrieved(statusData);
}
});
}
@Override
public void register(TeamsDatabaseServiceObserver observer) {
observers.add(observer);
}
@Override
public void deregister(TeamsDatabaseServiceObserver observer) {
observers.remove(observer);
}
}
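// Usage sketch (not part of the original source): paging through teammate routes. The adapter
// skips routes created by the current user by querying createdBy < currentUserDisplayName and,
// if that page comes back short, createdBy > currentUserDisplayName, then reports the results
// through TeamsRoutesObserver.onRoutesRetrieved(routes, lastVisibleDoc). The observer variable
// below is hypothetical (e.g. an Activity implementing TeamsRoutesObserver).
//
//     TeamsDatabaseService teams = new FireBaseFireStoreAdapterTeams();
//     teams.register(teamRoutesObserver);
//     teams.getUserTeamRoutes(teamUid, currentUserDisplayName, 10, null);   // first page
//     // pass the lastVisibleDoc received in onRoutesRetrieved(...) back as the
//     // 'lastRoute' argument to fetch the next page.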
<file_sep>/app/documentation/index-files/index-13.html
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (1.8.0_212-release) on Mon Mar 09 12:46:15 PDT 2020 -->
<title>O-Index</title>
<meta name="date" content="2020-03-09">
<link rel="stylesheet" type="text/css" href="../stylesheet.css" title="Style">
<script type="text/javascript" src="../script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="O-Index";
}
}
catch(err) {
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../overview-summary.html">Overview</a></li>
<li>Package</li>
<li>Class</li>
<li><a href="../overview-tree.html">Tree</a></li>
<li><a href="../deprecated-list.html">Deprecated</a></li>
<li class="navBarCell1Rev">Index</li>
<li><a href="../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="index-12.html">Prev Letter</a></li>
<li><a href="index-14.html">Next Letter</a></li>
</ul>
<ul class="navList">
<li><a href="../index.html?index-files/index-13.html" target="_top">Frames</a></li>
<li><a href="index-13.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="contentContainer"><a href="index-1.html">A</a> <a href="index-2.html">B</a> <a href="index-3.html">C</a> <a href="index-4.html">D</a> <a href="index-5.html">E</a> <a href="index-6.html">F</a> <a href="index-7.html">G</a> <a href="index-8.html">H</a> <a href="index-9.html">I</a> <a href="index-10.html">L</a> <a href="index-11.html">M</a> <a href="index-12.html">N</a> <a href="index-13.html">O</a> <a href="index-14.html">P</a> <a href="index-15.html">R</a> <a href="index-16.html">S</a> <a href="index-17.html">T</a> <a href="index-18.html">U</a> <a href="index-19.html">V</a> <a href="index-20.html">W</a> <a name="I:O">
<!-- -->
</a>
<h2 class="title">O</h2>
<dl>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/mockedservices/TestAuth.TestAuthImplementation.html#observer">observer</a></span> - Variable in class com.cse110team24.walkwalkrevolution.mockedservices.<a href="../com/cse110team24/walkwalkrevolution/mockedservices/TestAuth.TestAuthImplementation.html" title="class in com.cse110team24.walkwalkrevolution.mockedservices">TestAuth.TestAuthImplementation</a></dt>
<dd>
<div class="block">The single observer who will be notified of method calls.</div>
</dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/mockedservices/TestUsersDatabaseService.html#observer">observer</a></span> - Variable in class com.cse110team24.walkwalkrevolution.mockedservices.<a href="../com/cse110team24/walkwalkrevolution/mockedservices/TestUsersDatabaseService.html" title="class in com.cse110team24.walkwalkrevolution.mockedservices">TestUsersDatabaseService</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/activities/userroutes/RoutesActivity.html#onActivityResult-int-int-android.content.Intent-">onActivityResult(int, int, Intent)</a></span> - Method in class com.cse110team24.walkwalkrevolution.activities.userroutes.<a href="../com/cse110team24/walkwalkrevolution/activities/userroutes/RoutesActivity.html" title="class in com.cse110team24.walkwalkrevolution.activities.userroutes">RoutesActivity</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/HomeActivity.html#onActivityResult-int-int-android.content.Intent-">onActivityResult(int, int, Intent)</a></span> - Method in class com.cse110team24.walkwalkrevolution.<a href="../com/cse110team24/walkwalkrevolution/HomeActivity.html" title="class in com.cse110team24.walkwalkrevolution">HomeActivity</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/firebase/auth/AuthObserver.html#onAuthSignInError-com.cse110team24.walkwalkrevolution.firebase.auth.Auth.AuthError-">onAuthSignInError(Auth.AuthError)</a></span> - Method in interface com.cse110team24.walkwalkrevolution.firebase.auth.<a href="../com/cse110team24/walkwalkrevolution/firebase/auth/AuthObserver.html" title="interface in com.cse110team24.walkwalkrevolution.firebase.auth">AuthObserver</a></dt>
<dd>
<div class="block">Called when the AuthServiceSubject this observer is observing fails to sign in the user</div>
</dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/LoginActivity.html#onAuthSignInError-com.cse110team24.walkwalkrevolution.firebase.auth.Auth.AuthError-">onAuthSignInError(Auth.AuthError)</a></span> - Method in class com.cse110team24.walkwalkrevolution.<a href="../com/cse110team24/walkwalkrevolution/LoginActivity.html" title="class in com.cse110team24.walkwalkrevolution">LoginActivity</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/firebase/auth/AuthObserver.html#onAuthSignUpError-com.cse110team24.walkwalkrevolution.firebase.auth.Auth.AuthError-">onAuthSignUpError(Auth.AuthError)</a></span> - Method in interface com.cse110team24.walkwalkrevolution.firebase.auth.<a href="../com/cse110team24/walkwalkrevolution/firebase/auth/AuthObserver.html" title="interface in com.cse110team24.walkwalkrevolution.firebase.auth">AuthObserver</a></dt>
<dd>
<div class="block">Called when the AuthServiceSubject this observer is observing fails to sign up the user</div>
</dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/LoginActivity.html#onAuthSignUpError-com.cse110team24.walkwalkrevolution.firebase.auth.Auth.AuthError-">onAuthSignUpError(Auth.AuthError)</a></span> - Method in class com.cse110team24.walkwalkrevolution.<a href="../com/cse110team24/walkwalkrevolution/LoginActivity.html" title="class in com.cse110team24.walkwalkrevolution">LoginActivity</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/firebase/auth/FirebaseAuthAdapter.html#onAuthStateChanged-com.google.firebase.auth.FirebaseAuth-">onAuthStateChanged(FirebaseAuth)</a></span> - Method in class com.cse110team24.walkwalkrevolution.firebase.auth.<a href="../com/cse110team24/walkwalkrevolution/firebase/auth/FirebaseAuthAdapter.html" title="class in com.cse110team24.walkwalkrevolution.firebase.auth">FirebaseAuthAdapter</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/HomeActivity.html#onBackPressed--">onBackPressed()</a></span> - Method in class com.cse110team24.walkwalkrevolution.<a href="../com/cse110team24/walkwalkrevolution/HomeActivity.html" title="class in com.cse110team24.walkwalkrevolution">HomeActivity</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/activities/userroutes/RouteRecyclerViewAdapter.html#onBindViewHolder-com.cse110team24.walkwalkrevolution.activities.userroutes.RouteRecyclerViewAdapter.ViewHolder-int-">onBindViewHolder(RouteRecyclerViewAdapter.ViewHolder, int)</a></span> - Method in class com.cse110team24.walkwalkrevolution.activities.userroutes.<a href="../com/cse110team24/walkwalkrevolution/activities/userroutes/RouteRecyclerViewAdapter.html" title="class in com.cse110team24.walkwalkrevolution.activities.userroutes">RouteRecyclerViewAdapter</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/activities/userroutes/RouteRecyclerViewAdapter.ViewHolder.html#onClick-android.view.View-">onClick(View)</a></span> - Method in class com.cse110team24.walkwalkrevolution.activities.userroutes.<a href="../com/cse110team24/walkwalkrevolution/activities/userroutes/RouteRecyclerViewAdapter.ViewHolder.html" title="class in com.cse110team24.walkwalkrevolution.activities.userroutes">RouteRecyclerViewAdapter.ViewHolder</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/activities/invitations/InvitationsActivity.html#onCreate-android.os.Bundle-">onCreate(Bundle)</a></span> - Method in class com.cse110team24.walkwalkrevolution.activities.invitations.<a href="../com/cse110team24/walkwalkrevolution/activities/invitations/InvitationsActivity.html" title="class in com.cse110team24.walkwalkrevolution.activities.invitations">InvitationsActivity</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/activities/invitations/InviteTeamMemberActivity.html#onCreate-android.os.Bundle-">onCreate(Bundle)</a></span> - Method in class com.cse110team24.walkwalkrevolution.activities.invitations.<a href="../com/cse110team24/walkwalkrevolution/activities/invitations/InviteTeamMemberActivity.html" title="class in com.cse110team24.walkwalkrevolution.activities.invitations">InviteTeamMemberActivity</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/activities/teams/InviteTeamToWalkActivity.html#onCreate-android.os.Bundle-">onCreate(Bundle)</a></span> - Method in class com.cse110team24.walkwalkrevolution.activities.teams.<a href="../com/cse110team24/walkwalkrevolution/activities/teams/InviteTeamToWalkActivity.html" title="class in com.cse110team24.walkwalkrevolution.activities.teams">InviteTeamToWalkActivity</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/activities/teams/TeamActivity.html#onCreate-android.os.Bundle-">onCreate(Bundle)</a></span> - Method in class com.cse110team24.walkwalkrevolution.activities.teams.<a href="../com/cse110team24/walkwalkrevolution/activities/teams/TeamActivity.html" title="class in com.cse110team24.walkwalkrevolution.activities.teams">TeamActivity</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/activities/teams/TeamRoutesActivity.html#onCreate-android.os.Bundle-">onCreate(Bundle)</a></span> - Method in class com.cse110team24.walkwalkrevolution.activities.teams.<a href="../com/cse110team24/walkwalkrevolution/activities/teams/TeamRoutesActivity.html" title="class in com.cse110team24.walkwalkrevolution.activities.teams">TeamRoutesActivity</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/activities/userroutes/RouteDetailsActivity.html#onCreate-android.os.Bundle-">onCreate(Bundle)</a></span> - Method in class com.cse110team24.walkwalkrevolution.activities.userroutes.<a href="../com/cse110team24/walkwalkrevolution/activities/userroutes/RouteDetailsActivity.html" title="class in com.cse110team24.walkwalkrevolution.activities.userroutes">RouteDetailsActivity</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/activities/userroutes/RoutesActivity.html#onCreate-android.os.Bundle-">onCreate(Bundle)</a></span> - Method in class com.cse110team24.walkwalkrevolution.activities.userroutes.<a href="../com/cse110team24/walkwalkrevolution/activities/userroutes/RoutesActivity.html" title="class in com.cse110team24.walkwalkrevolution.activities.userroutes">RoutesActivity</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/activities/userroutes/SaveRouteActivity.html#onCreate-android.os.Bundle-">onCreate(Bundle)</a></span> - Method in class com.cse110team24.walkwalkrevolution.activities.userroutes.<a href="../com/cse110team24/walkwalkrevolution/activities/userroutes/SaveRouteActivity.html" title="class in com.cse110team24.walkwalkrevolution.activities.userroutes">SaveRouteActivity</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/application/FirebaseApplicationWWR.html#onCreate--">onCreate()</a></span> - Method in class com.cse110team24.walkwalkrevolution.application.<a href="../com/cse110team24/walkwalkrevolution/application/FirebaseApplicationWWR.html" title="class in com.cse110team24.walkwalkrevolution.application">FirebaseApplicationWWR</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/HomeActivity.html#onCreate-android.os.Bundle-">onCreate(Bundle)</a></span> - Method in class com.cse110team24.walkwalkrevolution.<a href="../com/cse110team24/walkwalkrevolution/HomeActivity.html" title="class in com.cse110team24.walkwalkrevolution">HomeActivity</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/LoginActivity.html#onCreate-android.os.Bundle-">onCreate(Bundle)</a></span> - Method in class com.cse110team24.walkwalkrevolution.<a href="../com/cse110team24/walkwalkrevolution/LoginActivity.html" title="class in com.cse110team24.walkwalkrevolution">LoginActivity</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/MockActivity.html#onCreate-android.os.Bundle-">onCreate(Bundle)</a></span> - Method in class com.cse110team24.walkwalkrevolution.<a href="../com/cse110team24/walkwalkrevolution/MockActivity.html" title="class in com.cse110team24.walkwalkrevolution">MockActivity</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/activities/userroutes/RouteDetailsActivity.html#onCreateOptionsMenu-android.view.Menu-">onCreateOptionsMenu(Menu)</a></span> - Method in class com.cse110team24.walkwalkrevolution.activities.userroutes.<a href="../com/cse110team24/walkwalkrevolution/activities/userroutes/RouteDetailsActivity.html" title="class in com.cse110team24.walkwalkrevolution.activities.userroutes">RouteDetailsActivity</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/activities/userroutes/RoutesActivity.html#onCreateOptionsMenu-android.view.Menu-">onCreateOptionsMenu(Menu)</a></span> - Method in class com.cse110team24.walkwalkrevolution.activities.userroutes.<a href="../com/cse110team24/walkwalkrevolution/activities/userroutes/RoutesActivity.html" title="class in com.cse110team24.walkwalkrevolution.activities.userroutes">RoutesActivity</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/activities/userroutes/RouteRecyclerViewAdapter.html#onCreateViewHolder-android.view.ViewGroup-int-">onCreateViewHolder(ViewGroup, int)</a></span> - Method in class com.cse110team24.walkwalkrevolution.activities.userroutes.<a href="../com/cse110team24/walkwalkrevolution/activities/userroutes/RouteRecyclerViewAdapter.html" title="class in com.cse110team24.walkwalkrevolution.activities.userroutes">RouteRecyclerViewAdapter</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/activities/userroutes/RoutesActivity.html#onDestroy--">onDestroy()</a></span> - Method in class com.cse110team24.walkwalkrevolution.activities.userroutes.<a href="../com/cse110team24/walkwalkrevolution/activities/userroutes/RoutesActivity.html" title="class in com.cse110team24.walkwalkrevolution.activities.userroutes">RoutesActivity</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/activities/invitations/InviteTeamMemberActivity.html#onFailedInvitationSent-com.google.android.gms.tasks.Task-">onFailedInvitationSent(Task<?>)</a></span> - Method in class com.cse110team24.walkwalkrevolution.activities.invitations.<a href="../com/cse110team24/walkwalkrevolution/activities/invitations/InviteTeamMemberActivity.html" title="class in com.cse110team24.walkwalkrevolution.activities.invitations">InviteTeamMemberActivity</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/firebase/messaging/MessagingObserver.html#onFailedInvitationSent-com.google.android.gms.tasks.Task-">onFailedInvitationSent(Task<?>)</a></span> - Method in interface com.cse110team24.walkwalkrevolution.firebase.messaging.<a href="../com/cse110team24/walkwalkrevolution/firebase/messaging/MessagingObserver.html" title="interface in com.cse110team24.walkwalkrevolution.firebase.messaging">MessagingObserver</a></dt>
<dd>
<div class="block">Called by the MessagingSubject this observer is observing when the subject failed to send an
invitation.</div>
</dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/activities/invitations/InviteTeamMemberActivity.html#onInvitationSent-com.cse110team24.walkwalkrevolution.models.invitation.Invitation-">onInvitationSent(Invitation)</a></span> - Method in class com.cse110team24.walkwalkrevolution.activities.invitations.<a href="../com/cse110team24/walkwalkrevolution/activities/invitations/InviteTeamMemberActivity.html" title="class in com.cse110team24.walkwalkrevolution.activities.invitations">InviteTeamMemberActivity</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/firebase/messaging/MessagingObserver.html#onInvitationSent-com.cse110team24.walkwalkrevolution.models.invitation.Invitation-">onInvitationSent(Invitation)</a></span> - Method in interface com.cse110team24.walkwalkrevolution.firebase.messaging.<a href="../com/cse110team24/walkwalkrevolution/firebase/messaging/MessagingObserver.html" title="interface in com.cse110team24.walkwalkrevolution.firebase.messaging">MessagingObserver</a></dt>
<dd>
<div class="block">Called by the MessagingSubject this observer is observing when the subject successfully
sent the invitation.</div>
</dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/firebase/messaging/FirebaseMessagingServiceWWR.html#onMessageReceived-com.google.firebase.messaging.RemoteMessage-">onMessageReceived(RemoteMessage)</a></span> - Method in class com.cse110team24.walkwalkrevolution.firebase.messaging.<a href="../com/cse110team24/walkwalkrevolution/firebase/messaging/FirebaseMessagingServiceWWR.html" title="class in com.cse110team24.walkwalkrevolution.firebase.messaging">FirebaseMessagingServiceWWR</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/application/ApplicationObserver.html#onNewToken-java.lang.String-">onNewToken(String)</a></span> - Method in interface com.cse110team24.walkwalkrevolution.application.<a href="../com/cse110team24/walkwalkrevolution/application/ApplicationObserver.html" title="interface in com.cse110team24.walkwalkrevolution.application">ApplicationObserver</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/firebase/messaging/FirebaseMessagingServiceWWR.html#onNewToken-java.lang.String-">onNewToken(String)</a></span> - Method in class com.cse110team24.walkwalkrevolution.firebase.messaging.<a href="../com/cse110team24/walkwalkrevolution/firebase/messaging/FirebaseMessagingServiceWWR.html" title="class in com.cse110team24.walkwalkrevolution.firebase.messaging">FirebaseMessagingServiceWWR</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/activities/userroutes/RouteDetailsActivity.html#onOptionsItemSelected-android.view.MenuItem-">onOptionsItemSelected(MenuItem)</a></span> - Method in class com.cse110team24.walkwalkrevolution.activities.userroutes.<a href="../com/cse110team24/walkwalkrevolution/activities/userroutes/RouteDetailsActivity.html" title="class in com.cse110team24.walkwalkrevolution.activities.userroutes">RouteDetailsActivity</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/activities/userroutes/RoutesActivity.html#onOptionsItemSelected-android.view.MenuItem-">onOptionsItemSelected(MenuItem)</a></span> - Method in class com.cse110team24.walkwalkrevolution.activities.userroutes.<a href="../com/cse110team24/walkwalkrevolution/activities/userroutes/RoutesActivity.html" title="class in com.cse110team24.walkwalkrevolution.activities.userroutes">RoutesActivity</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/activities/teams/TeamActivity.html#onResume--">onResume()</a></span> - Method in class com.cse110team24.walkwalkrevolution.activities.teams.<a href="../com/cse110team24/walkwalkrevolution/activities/teams/TeamActivity.html" title="class in com.cse110team24.walkwalkrevolution.activities.teams">TeamActivity</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/activities/userroutes/RoutesActivity.html#onResume--">onResume()</a></span> - Method in class com.cse110team24.walkwalkrevolution.activities.userroutes.<a href="../com/cse110team24/walkwalkrevolution/activities/userroutes/RoutesActivity.html" title="class in com.cse110team24.walkwalkrevolution.activities.userroutes">RoutesActivity</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/activities/teams/TeamRoutesActivity.html#onRoutesRetrieved-java.util.List-com.google.firebase.firestore.DocumentSnapshot-">onRoutesRetrieved(List<Route>, DocumentSnapshot)</a></span> - Method in class com.cse110team24.walkwalkrevolution.activities.teams.<a href="../com/cse110team24/walkwalkrevolution/activities/teams/TeamRoutesActivity.html" title="class in com.cse110team24.walkwalkrevolution.activities.teams">TeamRoutesActivity</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/firebase/firestore/observers/teams/TeamsRoutesObserver.html#onRoutesRetrieved-java.util.List-com.google.firebase.firestore.DocumentSnapshot-">onRoutesRetrieved(List<Route>, DocumentSnapshot)</a></span> - Method in interface com.cse110team24.walkwalkrevolution.firebase.firestore.observers.teams.<a href="../com/cse110team24/walkwalkrevolution/firebase/firestore/observers/teams/TeamsRoutesObserver.html" title="interface in com.cse110team24.walkwalkrevolution.firebase.firestore.observers.teams">TeamsRoutesObserver</a></dt>
<dd>
<div class="block">Called by the TeamsDatabaseServiceSubject this observer is observing when the requested team routes
data is ready to be read.</div>
</dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/activities/userroutes/RoutesActivity.html#onStop--">onStop()</a></span> - Method in class com.cse110team24.walkwalkrevolution.activities.userroutes.<a href="../com/cse110team24/walkwalkrevolution/activities/userroutes/RoutesActivity.html" title="class in com.cse110team24.walkwalkrevolution.activities.userroutes">RoutesActivity</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/activities/teams/TeamActivity.html#onTeamRetrieved-com.cse110team24.walkwalkrevolution.models.team.ITeam-">onTeamRetrieved(ITeam)</a></span> - Method in class com.cse110team24.walkwalkrevolution.activities.teams.<a href="../com/cse110team24/walkwalkrevolution/activities/teams/TeamActivity.html" title="class in com.cse110team24.walkwalkrevolution.activities.teams">TeamActivity</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/firebase/firestore/observers/teams/TeamsTeammatesObserver.html#onTeamRetrieved-com.cse110team24.walkwalkrevolution.models.team.ITeam-">onTeamRetrieved(ITeam)</a></span> - Method in interface com.cse110team24.walkwalkrevolution.firebase.firestore.observers.teams.<a href="../com/cse110team24/walkwalkrevolution/firebase/firestore/observers/teams/TeamsTeammatesObserver.html" title="interface in com.cse110team24.walkwalkrevolution.firebase.firestore.observers.teams">TeamsTeammatesObserver</a></dt>
<dd>
<div class="block">Called by the TeamsDatabaseServiceSubject this observer is observing when the requested teammates
list data is ready to be read.</div>
</dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/firebase/firestore/observers/teams/TeamsTeamWalksObserver.html#onTeamWalksRetrieved-java.util.List-">onTeamWalksRetrieved(List<TeamWalk>)</a></span> - Method in interface com.cse110team24.walkwalkrevolution.firebase.firestore.observers.teams.<a href="../com/cse110team24/walkwalkrevolution/firebase/firestore/observers/teams/TeamsTeamWalksObserver.html" title="interface in com.cse110team24.walkwalkrevolution.firebase.firestore.observers.teams">TeamsTeamWalksObserver</a></dt>
<dd>
<div class="block">Called by the TeamsDatabaseServiceSubject this observer is observing when the requested team walks
list data is ready to be read.</div>
</dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/activities/invitations/InviteTeamMemberActivity.html#onUserData-java.util.Map-">onUserData(Map<String, Object>)</a></span> - Method in class com.cse110team24.walkwalkrevolution.activities.invitations.<a href="../com/cse110team24/walkwalkrevolution/activities/invitations/InviteTeamMemberActivity.html" title="class in com.cse110team24.walkwalkrevolution.activities.invitations">InviteTeamMemberActivity</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/firebase/firestore/observers/users/UsersUserDataObserver.html#onUserData-java.util.Map-">onUserData(Map<String, Object>)</a></span> - Method in interface com.cse110team24.walkwalkrevolution.firebase.firestore.observers.users.<a href="../com/cse110team24/walkwalkrevolution/firebase/firestore/observers/users/UsersUserDataObserver.html" title="interface in com.cse110team24.walkwalkrevolution.firebase.firestore.observers.users">UsersUserDataObserver</a></dt>
<dd>
<div class="block">Called by the UsersDatabaseServiceSubject this observer is observing when requested user data is ready
to read.</div>
</dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/HomeActivity.html#onUserData-java.util.Map-">onUserData(Map<String, Object>)</a></span> - Method in class com.cse110team24.walkwalkrevolution.<a href="../com/cse110team24/walkwalkrevolution/HomeActivity.html" title="class in com.cse110team24.walkwalkrevolution">HomeActivity</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/activities/invitations/InviteTeamMemberActivity.html#onUserDoesNotExist--">onUserDoesNotExist()</a></span> - Method in class com.cse110team24.walkwalkrevolution.activities.invitations.<a href="../com/cse110team24/walkwalkrevolution/activities/invitations/InviteTeamMemberActivity.html" title="class in com.cse110team24.walkwalkrevolution.activities.invitations">InviteTeamMemberActivity</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/firebase/firestore/observers/users/UsersUserExistsObserver.html#onUserDoesNotExist--">onUserDoesNotExist()</a></span> - Method in interface com.cse110team24.walkwalkrevolution.firebase.firestore.observers.users.<a href="../com/cse110team24/walkwalkrevolution/firebase/firestore/observers/users/UsersUserExistsObserver.html" title="interface in com.cse110team24.walkwalkrevolution.firebase.firestore.observers.users">UsersUserExistsObserver</a></dt>
<dd>
<div class="block">Called by the UsersDatabaseServiceSubject this observer is observing when the requested user
does not exist in the subject's provider database.</div>
</dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/activities/invitations/InviteTeamMemberActivity.html#onUserExists-com.cse110team24.walkwalkrevolution.models.user.IUser-">onUserExists(IUser)</a></span> - Method in class com.cse110team24.walkwalkrevolution.activities.invitations.<a href="../com/cse110team24/walkwalkrevolution/activities/invitations/InviteTeamMemberActivity.html" title="class in com.cse110team24.walkwalkrevolution.activities.invitations">InviteTeamMemberActivity</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/firebase/firestore/observers/users/UsersUserExistsObserver.html#onUserExists-com.cse110team24.walkwalkrevolution.models.user.IUser-">onUserExists(IUser)</a></span> - Method in interface com.cse110team24.walkwalkrevolution.firebase.firestore.observers.users.<a href="../com/cse110team24/walkwalkrevolution/firebase/firestore/observers/users/UsersUserExistsObserver.html" title="interface in com.cse110team24.walkwalkrevolution.firebase.firestore.observers.users">UsersUserExistsObserver</a></dt>
<dd>
<div class="block">Called by the UsersDatabaseServiceSubject this observer is observing when the requested user
exists in the subject's provider database.</div>
</dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/activities/invitations/InvitationsActivity.html#onUserPendingInvitations-java.util.List-">onUserPendingInvitations(List<Invitation>)</a></span> - Method in class com.cse110team24.walkwalkrevolution.activities.invitations.<a href="../com/cse110team24/walkwalkrevolution/activities/invitations/InvitationsActivity.html" title="class in com.cse110team24.walkwalkrevolution.activities.invitations">InvitationsActivity</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/firebase/firestore/observers/InvitationsDatabaseServiceObserver.html#onUserPendingInvitations-java.util.List-">onUserPendingInvitations(List<Invitation>)</a></span> - Method in interface com.cse110team24.walkwalkrevolution.firebase.firestore.observers.<a href="../com/cse110team24/walkwalkrevolution/firebase/firestore/observers/InvitationsDatabaseServiceObserver.html" title="interface in com.cse110team24.walkwalkrevolution.firebase.firestore.observers">InvitationsDatabaseServiceObserver</a></dt>
<dd>
<div class="block">Called by the InvitationsDatabaseServiceSubject this observer is observing when the requested invitations
list is ready to be read.</div>
</dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/firebase/auth/AuthObserver.html#onUserSignedIn-com.cse110team24.walkwalkrevolution.models.user.IUser-">onUserSignedIn(IUser)</a></span> - Method in interface com.cse110team24.walkwalkrevolution.firebase.auth.<a href="../com/cse110team24/walkwalkrevolution/firebase/auth/AuthObserver.html" title="interface in com.cse110team24.walkwalkrevolution.firebase.auth">AuthObserver</a></dt>
<dd>
<div class="block">Called when the AuthServiceSubject this observer is observing signs in the user.</div>
</dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/LoginActivity.html#onUserSignedIn-com.cse110team24.walkwalkrevolution.models.user.IUser-">onUserSignedIn(IUser)</a></span> - Method in class com.cse110team24.walkwalkrevolution.<a href="../com/cse110team24/walkwalkrevolution/LoginActivity.html" title="class in com.cse110team24.walkwalkrevolution">LoginActivity</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/firebase/auth/AuthObserver.html#onUserSignedUp-com.cse110team24.walkwalkrevolution.models.user.IUser-">onUserSignedUp(IUser)</a></span> - Method in interface com.cse110team24.walkwalkrevolution.firebase.auth.<a href="../com/cse110team24/walkwalkrevolution/firebase/auth/AuthObserver.html" title="interface in com.cse110team24.walkwalkrevolution.firebase.auth">AuthObserver</a></dt>
<dd>
<div class="block">Called when the AuthServiceSubject this observer is observing signs up the user.</div>
</dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/LoginActivity.html#onUserSignedUp-com.cse110team24.walkwalkrevolution.models.user.IUser-">onUserSignedUp(IUser)</a></span> - Method in class com.cse110team24.walkwalkrevolution.<a href="../com/cse110team24/walkwalkrevolution/LoginActivity.html" title="class in com.cse110team24.walkwalkrevolution">LoginActivity</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/InviteTeamMemberActivityUnitTest.html#otherUserHasTeam--">otherUserHasTeam()</a></span> - Method in class com.cse110team24.walkwalkrevolution.<a href="../com/cse110team24/walkwalkrevolution/InviteTeamMemberActivityUnitTest.html" title="class in com.cse110team24.walkwalkrevolution">InviteTeamMemberActivityUnitTest</a></dt>
<dd> </dd>
</dl>
<a href="index-1.html">A</a> <a href="index-2.html">B</a> <a href="index-3.html">C</a> <a href="index-4.html">D</a> <a href="index-5.html">E</a> <a href="index-6.html">F</a> <a href="index-7.html">G</a> <a href="index-8.html">H</a> <a href="index-9.html">I</a> <a href="index-10.html">L</a> <a href="index-11.html">M</a> <a href="index-12.html">N</a> <a href="index-13.html">O</a> <a href="index-14.html">P</a> <a href="index-15.html">R</a> <a href="index-16.html">S</a> <a href="index-17.html">T</a> <a href="index-18.html">U</a> <a href="index-19.html">V</a> <a href="index-20.html">W</a> </div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar.bottom">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.bottom.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../overview-summary.html">Overview</a></li>
<li>Package</li>
<li>Class</li>
<li><a href="../overview-tree.html">Tree</a></li>
<li><a href="../deprecated-list.html">Deprecated</a></li>
<li class="navBarCell1Rev">Index</li>
<li><a href="../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="index-12.html">Prev Letter</a></li>
<li><a href="index-14.html">Next Letter</a></li>
</ul>
<ul class="navList">
<li><a href="../index.html?index-files/index-13.html" target="_top">Frames</a></li>
<li><a href="index-13.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
</body>
</html>
<file_sep>/app/src/main/java/com/cse110team24/walkwalkrevolution/firebase/firestore/observers/teams/TeamsTeamStatusesObserver.java
package com.cse110team24.walkwalkrevolution.firebase.firestore.observers.teams;
import com.google.firebase.firestore.DocumentSnapshot;
import java.util.List;
import java.util.SortedMap;
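/**
 * Observer notified by a teams database service subject when a team walk's per-teammate
 * status data has been retrieved. Status entries map a teammate's display name to a
 * {@link com.cse110team24.walkwalkrevolution.models.team.walk.TeamWalkStatus} value in string form.
 */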
public interface TeamsTeamStatusesObserver extends TeamsDatabaseServiceObserver {
/**
* Called by the TeamsDatabaseServiceSubject this observer is observing when the requested team walks
* statuses data is ready to be read.
* <p>See also: {@link com.cse110team24.walkwalkrevolution.firebase.firestore.subjects.TeamsDatabaseServiceSubject#notifyObserversTeamWalkStatusesRetrieved(SortedMap)}</p>
* @param statusData map of teammate status data. Key = teammate display name and value = {@link com.cse110team24.walkwalkrevolution.models.team.walk.TeamWalkStatus}
* in string form.
*/
void onTeamWalkStatusesRetrieved(SortedMap<String, String> statusData);
}
<file_sep>/app/src/main/java/com/cse110team24/walkwalkrevolution/firebase/firestore/FirestoreDatabaseServiceFactory.java
package com.cse110team24.walkwalkrevolution.firebase.firestore;
import com.cse110team24.walkwalkrevolution.firebase.firestore.adapters.FireBaseFireStoreAdapterTeams;
import com.cse110team24.walkwalkrevolution.firebase.firestore.adapters.FirebaseFirestoreAdapter;
import com.cse110team24.walkwalkrevolution.firebase.firestore.adapters.FirebaseFirestoreAdapterInvitations;
import com.cse110team24.walkwalkrevolution.firebase.firestore.adapters.FirebaseFirestoreAdapterUsers;
import com.cse110team24.walkwalkrevolution.firebase.firestore.services.DatabaseService;
/**
* Instantiates one of 4 Cloud Firestore implementations of {@link DatabaseService}
*/
public class FirestoreDatabaseServiceFactory implements DatabaseServiceFactory {
@Override
public DatabaseService createDatabaseService(DatabaseService.Service service) {
switch (service) {
case USERS:
return new FirebaseFirestoreAdapterUsers();
case TEAMS:
return new FireBaseFireStoreAdapterTeams();
case INVITATIONS:
return new FirebaseFirestoreAdapterInvitations();
}
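// No specialized service matched, so fall back to the generic Firestore adapter.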
return new FirebaseFirestoreAdapter();
}
}
<file_sep>/app/src/main/java/com/cse110team24/walkwalkrevolution/firebase/auth/AuthFactory.java
package com.cse110team24.walkwalkrevolution.firebase.auth;
/**
* Creates an instance of an {@link Auth} implementation.
*/
public interface AuthFactory {
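/**
 * Create and return a new {@link Auth} service instance backed by this factory's
 * authentication provider.
 */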
Auth createAuthService();
}
<file_sep>/app/src/main/java/com/cse110team24/walkwalkrevolution/models/team/walk/TeamWalkStatus.java
package com.cse110team24.walkwalkrevolution.models.team.walk;
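/**
 * Status values a team walk can take. Judging by the constant names, a walk starts as
 * PROPOSED and may later become SCHEDULED, CANCELLED, or WITHDRAWN (the exact semantics
 * of each state are assumed from its name).
 */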
public enum TeamWalkStatus {
PROPOSED,
SCHEDULED,
CANCELLED,
WITHDRAWN
}
<file_sep>/app/src/main/java/com/cse110team24/walkwalkrevolution/activities/teams/TeamActivity.java
package com.cse110team24.walkwalkrevolution.activities.teams;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.Toast;
import android.widget.ListView;
import android.widget.TextView;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import com.cse110team24.walkwalkrevolution.HomeActivity;
import com.cse110team24.walkwalkrevolution.activities.invitations.InvitationsActivity;
import com.cse110team24.walkwalkrevolution.activities.invitations.InviteTeamMemberActivity;
import com.cse110team24.walkwalkrevolution.R;
import com.cse110team24.walkwalkrevolution.activities.userroutes.RoutesActivity;
import com.cse110team24.walkwalkrevolution.application.FirebaseApplicationWWR;
import com.cse110team24.walkwalkrevolution.firebase.firestore.observers.teams.TeamsTeammatesObserver;
import com.cse110team24.walkwalkrevolution.firebase.firestore.services.DatabaseService;
import com.cse110team24.walkwalkrevolution.firebase.firestore.services.TeamsDatabaseService;
import com.cse110team24.walkwalkrevolution.firebase.firestore.services.UsersDatabaseService;
import com.cse110team24.walkwalkrevolution.models.team.ITeam;
import com.cse110team24.walkwalkrevolution.models.team.TeamAdapter;
import com.cse110team24.walkwalkrevolution.models.user.IUser;
import com.google.android.material.bottomnavigation.BottomNavigationView;
import java.util.ArrayList;
import java.util.List;
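/**
 * Screen that displays the current user's team. Retrieves the team through the
 * {@link TeamsDatabaseService}, receives it via {@link #onTeamRetrieved(ITeam)}, and
 * provides buttons to invite members, view pending invitations, browse teammates' routes,
 * and see scheduled team walks.
 */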
public class TeamActivity extends AppCompatActivity implements TeamsTeammatesObserver {
private static final String TAG = "WWR_TeamActivity";
public static final int REQUEST_CODE = 7851;
private Button sendInviteBtn;
private Button seeInvitationsBtn;
private Button seeTeammateRoutesBtn;
private Button seeScheduledWalksBtn;
private BottomNavigationView bottomNavigationView;
private TeamsDatabaseService mDb;
private UsersDatabaseService uDb;
SharedPreferences mPreferences;
private ITeam mTeam = new TeamAdapter(new ArrayList<>());
private String mTeamUid;
private SharedPreferences preferences;
// The following three fields are for fakeTesting() only and should be deleted afterwards.
private ListView teammatesList;
private TeammatesListViewAdapter teammatesListViewAdapter;
TextView noTeamMessage;
public Context context = this;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_team);
preferences = getSharedPreferences(HomeActivity.APP_PREF, Context.MODE_PRIVATE);
setUpServices();
}
@Override
protected void onResume() {
super.onResume();
getUIFields();
getTeamUid();
setButtonClickListeners();
seeInvitationsBtn.setOnClickListener(view -> {
launchInvitationsActivity(view);
});
}
@Override
protected void onActivityResult(int requestCode, int resultCode, @Nullable Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (requestCode == TeamRoutesActivity.REQUEST_CODE && resultCode == RESULT_OK) {
Log.d(TAG, "onActivityResult: recording team's walk");
setResult(Activity.RESULT_OK, data);
transitionWithAnimation();
}
}
private void transitionWithAnimation() {
Intent walkIntent = new Intent(getApplicationContext(), HomeActivity.class);
walkIntent.addFlags(Intent.FLAG_ACTIVITY_REORDER_TO_FRONT);
startActivity(walkIntent);
finish();
overridePendingTransition(R.anim.slide_in_right, R.anim.slide_out_right);
}
private void launchInvitationsActivity(View view) {
Intent intent = new Intent(this, InvitationsActivity.class);
startActivity(intent);
}
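/**
 * Reads the saved team UID from shared preferences. If none is stored, the user has no
 * team yet and a toast is shown; otherwise the team's members are requested from the
 * teams database service.
 */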
private void getTeamUid() {
mPreferences = getSharedPreferences(HomeActivity.APP_PREF, Context.MODE_PRIVATE);
mTeamUid = mPreferences.getString(IUser.TEAM_UID_KEY, null);
if (mTeamUid == null) {
showNoTeamToast();
} else {
Log.d(TAG, "getTeamUid: team uid found, retrieving team");
seeTeammateRoutesBtn.setEnabled(true);
mDb.getUserTeam(mTeamUid, preferences.getString(IUser.USER_NAME_KEY, ""));
}
}
private void setUpServices() {
mDb = (TeamsDatabaseService) FirebaseApplicationWWR.getDatabaseServiceFactory().createDatabaseService(DatabaseService.Service.TEAMS);
mDb.register(this);
uDb = (UsersDatabaseService) FirebaseApplicationWWR.getDatabaseServiceFactory().createDatabaseService(DatabaseService.Service.USERS);
}
private void getUIFields() {
sendInviteBtn = findViewById(R.id.btn_team_activity_invite_team_members);
bottomNavigationView = findViewById(R.id.bottom_navigation);
seeInvitationsBtn = findViewById(R.id.btn_team_activity_pending_invites);
seeTeammateRoutesBtn = findViewById(R.id.btn_team_activity_see_teammate_routes);
seeScheduledWalksBtn = findViewById(R.id.btn_scheduled_walks);
noTeamMessage = findViewById(R.id.text_no_teammates);
teammatesList = findViewById(R.id.list_members_in_team);
teammatesListViewAdapter = new TeammatesListViewAdapter(this, mTeam.getTeam(), preferences);
noTeamMessage.setVisibility(View.GONE);
teammatesList.setAdapter(teammatesListViewAdapter);
}
private void setButtonClickListeners() {
setInviteButtonOnClick();
setBottomNavItemSelectedListener();
setSeeTeamRoutesOnClick();
setSeeScheduledWalksOnClick();
}
private void setInviteButtonOnClick() {
sendInviteBtn.setOnClickListener(view -> {
launchInviteRouteActivity();
});
}
private void setBottomNavItemSelectedListener() {
bottomNavigationView.setOnNavigationItemSelectedListener(menuItem -> {
if (menuItem.getItemId() == R.id.action_home) {
Intent myIntent = new Intent(getApplicationContext(), HomeActivity.class);
myIntent.addFlags(Intent.FLAG_ACTIVITY_REORDER_TO_FRONT);
startActivity(myIntent);
}
if (menuItem.getItemId() == R.id.action_routes_list) {
Intent myIntent = new Intent(getApplicationContext(), RoutesActivity.class);
myIntent.addFlags(Intent.FLAG_ACTIVITY_REORDER_TO_FRONT);
startActivity(myIntent);
}
return true;
});
}
private void setSeeTeamRoutesOnClick() {
seeTeammateRoutesBtn.setOnClickListener(v -> {
startActivityForResult(new Intent(this, TeamRoutesActivity.class), TeamRoutesActivity.REQUEST_CODE);
});
}
private void setSeeScheduledWalksOnClick() {
seeScheduledWalksBtn.setOnClickListener(v -> {
startActivity(new Intent(this, ScheduledProposedWalkActivity.class));
});
}
private void launchInviteRouteActivity() {
Intent intent = new Intent(this, InviteTeamMemberActivity.class);
startActivity(intent);
}
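/**
 * Callback from the teams database service with the retrieved team (which does not
 * include the currently signed-in user). Shows the "no teammates" message when the list
 * is empty, otherwise populates the teammates list view.
 */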
@Override
public void onTeamRetrieved(ITeam team) {
mTeam = team;
List<IUser> users = mTeam.getTeam();
TextView noTeamMessage = findViewById(R.id.text_no_teammates);
if (users.isEmpty()) {
noTeamMessage.setVisibility(View.VISIBLE);
} else {
noTeamMessage.setVisibility(View.GONE);
}
ListView teammatesList = findViewById(R.id.list_members_in_team);
TeammatesListViewAdapter teammatesListViewAdapter = new TeammatesListViewAdapter(this, users, preferences);
teammatesList.setAdapter(teammatesListViewAdapter);
}
private void showNoTeamToast() {
Toast.makeText(this, "You don't have a team. Try sending an invitation!", Toast.LENGTH_LONG).show();
}
}
<file_sep>/app/src/main/java/com/cse110team24/walkwalkrevolution/firebase/firestore/subjects/TeamsDatabaseServiceSubject.java
package com.cse110team24.walkwalkrevolution.firebase.firestore.subjects;
import com.cse110team24.walkwalkrevolution.firebase.firestore.observers.teams.TeamsDatabaseServiceObserver;
import com.cse110team24.walkwalkrevolution.firebase.firestore.observers.teams.TeamsTeamWalksObserver;
import com.cse110team24.walkwalkrevolution.models.route.Route;
import com.cse110team24.walkwalkrevolution.models.team.ITeam;
import com.cse110team24.walkwalkrevolution.models.team.walk.TeamWalk;
import com.cse110team24.walkwalkrevolution.utils.Subject;
import com.google.firebase.firestore.DocumentSnapshot;
import java.util.List;
import java.util.SortedMap;
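/**
 * Subject counterpart to {@link TeamsDatabaseServiceObserver}: implementations notify their
 * registered observers when requested team, team-route, team-walk, or walk-status data is
 * ready to be read.
 */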
public interface TeamsDatabaseServiceSubject extends Subject<TeamsDatabaseServiceObserver> {
/**
* Notify this subject's observers of type {@link com.cse110team24.walkwalkrevolution.firebase.firestore.observers.teams.TeamsTeammatesObserver}
* that the requested team's teammates data is ready to read.
* See also: {@link com.cse110team24.walkwalkrevolution.firebase.firestore.observers.teams.TeamsTeammatesObserver#onTeamRetrieved(ITeam)}.
* @param team the team that was retrieved. Does not include the currently signed in user.
*/
void notifyObserversTeamRetrieved(ITeam team);
/**
* Notify this subject's observers of type {@link com.cse110team24.walkwalkrevolution.firebase.firestore.observers.teams.TeamsRoutesObserver}
* that the requested team's routes data is ready to be read.
* <p>See also: {@link com.cse110team24.walkwalkrevolution.firebase.firestore.observers.teams.TeamsRoutesObserver#onRoutesRetrieved(List, DocumentSnapshot)}</p>
* @param routes the list of routes that was retrieved. Does not include the currently signed in user's routes.
* @param lastRoute the last route in the list's DocumentSnapshot.
*/
void notifyObserversTeamRoutesRetrieved(List<Route> routes, DocumentSnapshot lastRoute);
/**
* Notify this subject's observers of type {@link TeamsTeamWalksObserver} that the requested
* team's walks data is ready to be read.
* <p>See also: {@link TeamsTeamWalksObserver#onTeamWalksRetrieved(List)}</p>
* @param walks the list of TeamWalks that was retrieved.
*/
void notifyObserversTeamWalksRetrieved(List<TeamWalk> walks);
/**
* Notify this subject's observers of type {@link com.cse110team24.walkwalkrevolution.firebase.firestore.observers.teams.TeamsTeamStatusesObserver}
* that the requested team walk statuses data is ready to be read.
* <p>See also: {@link com.cse110team24.walkwalkrevolution.firebase.firestore.observers.teams.TeamsTeamStatusesObserver#onTeamWalkStatusesRetrieved(SortedMap)}</p>
* @param statusData map of teammate status data. Key = teammate display name and value = {@link com.cse110team24.walkwalkrevolution.models.team.walk.TeamWalkStatus}
* in string form.
*/
void notifyObserversTeamWalkStatusesRetrieved(SortedMap<String, String> statusData);
}
<file_sep>/app/documentation/index-files/index-3.html
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (1.8.0_212-release) on Mon Mar 09 12:46:15 PDT 2020 -->
<title>C-Index</title>
<meta name="date" content="2020-03-09">
<link rel="stylesheet" type="text/css" href="../stylesheet.css" title="Style">
<script type="text/javascript" src="../script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="C-Index";
}
}
catch(err) {
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../overview-summary.html">Overview</a></li>
<li>Package</li>
<li>Class</li>
<li><a href="../overview-tree.html">Tree</a></li>
<li><a href="../deprecated-list.html">Deprecated</a></li>
<li class="navBarCell1Rev">Index</li>
<li><a href="../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="index-2.html">Prev Letter</a></li>
<li><a href="index-4.html">Next Letter</a></li>
</ul>
<ul class="navList">
<li><a href="../index.html?index-files/index-3.html" target="_top">Frames</a></li>
<li><a href="index-3.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="contentContainer"><a href="index-1.html">A</a> <a href="index-2.html">B</a> <a href="index-3.html">C</a> <a href="index-4.html">D</a> <a href="index-5.html">E</a> <a href="index-6.html">F</a> <a href="index-7.html">G</a> <a href="index-8.html">H</a> <a href="index-9.html">I</a> <a href="index-10.html">L</a> <a href="index-11.html">M</a> <a href="index-12.html">N</a> <a href="index-13.html">O</a> <a href="index-14.html">P</a> <a href="index-15.html">R</a> <a href="index-16.html">S</a> <a href="index-17.html">T</a> <a href="index-18.html">U</a> <a href="index-19.html">V</a> <a href="index-20.html">W</a> <a name="I:C">
<!-- -->
</a>
<h2 class="title">C</h2>
<dl>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/firebase/firestore/adapters/FirebaseFirestoreAdapterUsers.html#checkIfOtherUserExists-java.lang.String-">checkIfOtherUserExists(String)</a></span> - Method in class com.cse110team24.walkwalkrevolution.firebase.firestore.adapters.<a href="../com/cse110team24/walkwalkrevolution/firebase/firestore/adapters/FirebaseFirestoreAdapterUsers.html" title="class in com.cse110team24.walkwalkrevolution.firebase.firestore.adapters">FirebaseFirestoreAdapterUsers</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/firebase/firestore/services/UsersDatabaseService.html#checkIfOtherUserExists-java.lang.String-">checkIfOtherUserExists(String)</a></span> - Method in interface com.cse110team24.walkwalkrevolution.firebase.firestore.services.<a href="../com/cse110team24/walkwalkrevolution/firebase/firestore/services/UsersDatabaseService.html" title="interface in com.cse110team24.walkwalkrevolution.firebase.firestore.services">UsersDatabaseService</a></dt>
<dd>
<div class="block">Query this service's provider database for a user given userDocumentKey.</div>
</dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/mockedservices/TestUsersDatabaseService.html#checkIfOtherUserExists-java.lang.String-">checkIfOtherUserExists(String)</a></span> - Method in class com.cse110team24.walkwalkrevolution.mockedservices.<a href="../com/cse110team24/walkwalkrevolution/mockedservices/TestUsersDatabaseService.html" title="class in com.cse110team24.walkwalkrevolution.mockedservices">TestUsersDatabaseService</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/utils/Utils.html#checkNotNull-java.lang.Object-">checkNotNull(Object)</a></span> - Static method in class com.cse110team24.walkwalkrevolution.utils.<a href="../com/cse110team24/walkwalkrevolution/utils/Utils.html" title="class in com.cse110team24.walkwalkrevolution.utils">Utils</a></dt>
<dd> </dd>
<dt><a href="../com/cse110team24/walkwalkrevolution/CheckTeamScreenBDDEspressoTest.html" title="class in com.cse110team24.walkwalkrevolution"><span class="typeNameLink">CheckTeamScreenBDDEspressoTest</span></a> - Class in <a href="../com/cse110team24/walkwalkrevolution/package-summary.html">com.cse110team24.walkwalkrevolution</a></dt>
<dd>
<div class="block">Scenario: User has a team, sees team members listed in app
Given that the user has a team,
When they click on the "Team" button from the home screen
Then a list of their fellow teammates will be displayed.</div>
</dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/CheckTeamScreenBDDEspressoTest.html#CheckTeamScreenBDDEspressoTest--">CheckTeamScreenBDDEspressoTest()</a></span> - Constructor for class com.cse110team24.walkwalkrevolution.<a href="../com/cse110team24/walkwalkrevolution/CheckTeamScreenBDDEspressoTest.html" title="class in com.cse110team24.walkwalkrevolution">CheckTeamScreenBDDEspressoTest</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/CheckTeamScreenBDDEspressoTest.html#checkTeamScreenEspressoTest--">checkTeamScreenEspressoTest()</a></span> - Method in class com.cse110team24.walkwalkrevolution.<a href="../com/cse110team24/walkwalkrevolution/CheckTeamScreenBDDEspressoTest.html" title="class in com.cse110team24.walkwalkrevolution">CheckTeamScreenBDDEspressoTest</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/utils/Utils.html#cleanEmail-java.lang.String-">cleanEmail(String)</a></span> - Static method in class com.cse110team24.walkwalkrevolution.utils.<a href="../com/cse110team24/walkwalkrevolution/utils/Utils.html" title="class in com.cse110team24.walkwalkrevolution.utils">Utils</a></dt>
<dd> </dd>
<dt><a href="../com/cse110team24/walkwalkrevolution/package-summary.html">com.cse110team24.walkwalkrevolution</a> - package com.cse110team24.walkwalkrevolution</dt>
<dd> </dd>
<dt><a href="../com/cse110team24/walkwalkrevolution/activities/invitations/package-summary.html">com.cse110team24.walkwalkrevolution.activities.invitations</a> - package com.cse110team24.walkwalkrevolution.activities.invitations</dt>
<dd> </dd>
<dt><a href="../com/cse110team24/walkwalkrevolution/activities/teams/package-summary.html">com.cse110team24.walkwalkrevolution.activities.teams</a> - package com.cse110team24.walkwalkrevolution.activities.teams</dt>
<dd> </dd>
<dt><a href="../com/cse110team24/walkwalkrevolution/activities/userroutes/package-summary.html">com.cse110team24.walkwalkrevolution.activities.userroutes</a> - package com.cse110team24.walkwalkrevolution.activities.userroutes</dt>
<dd> </dd>
<dt><a href="../com/cse110team24/walkwalkrevolution/application/package-summary.html">com.cse110team24.walkwalkrevolution.application</a> - package com.cse110team24.walkwalkrevolution.application</dt>
<dd> </dd>
<dt><a href="../com/cse110team24/walkwalkrevolution/firebase/auth/package-summary.html">com.cse110team24.walkwalkrevolution.firebase.auth</a> - package com.cse110team24.walkwalkrevolution.firebase.auth</dt>
<dd> </dd>
<dt><a href="../com/cse110team24/walkwalkrevolution/firebase/firestore/package-summary.html">com.cse110team24.walkwalkrevolution.firebase.firestore</a> - package com.cse110team24.walkwalkrevolution.firebase.firestore</dt>
<dd> </dd>
<dt><a href="../com/cse110team24/walkwalkrevolution/firebase/firestore/adapters/package-summary.html">com.cse110team24.walkwalkrevolution.firebase.firestore.adapters</a> - package com.cse110team24.walkwalkrevolution.firebase.firestore.adapters</dt>
<dd> </dd>
<dt><a href="../com/cse110team24/walkwalkrevolution/firebase/firestore/observers/package-summary.html">com.cse110team24.walkwalkrevolution.firebase.firestore.observers</a> - package com.cse110team24.walkwalkrevolution.firebase.firestore.observers</dt>
<dd> </dd>
<dt><a href="../com/cse110team24/walkwalkrevolution/firebase/firestore/observers/teams/package-summary.html">com.cse110team24.walkwalkrevolution.firebase.firestore.observers.teams</a> - package com.cse110team24.walkwalkrevolution.firebase.firestore.observers.teams</dt>
<dd> </dd>
<dt><a href="../com/cse110team24/walkwalkrevolution/firebase/firestore/observers/users/package-summary.html">com.cse110team24.walkwalkrevolution.firebase.firestore.observers.users</a> - package com.cse110team24.walkwalkrevolution.firebase.firestore.observers.users</dt>
<dd> </dd>
<dt><a href="../com/cse110team24/walkwalkrevolution/firebase/firestore/services/package-summary.html">com.cse110team24.walkwalkrevolution.firebase.firestore.services</a> - package com.cse110team24.walkwalkrevolution.firebase.firestore.services</dt>
<dd> </dd>
<dt><a href="../com/cse110team24/walkwalkrevolution/firebase/firestore/subjects/package-summary.html">com.cse110team24.walkwalkrevolution.firebase.firestore.subjects</a> - package com.cse110team24.walkwalkrevolution.firebase.firestore.subjects</dt>
<dd> </dd>
<dt><a href="../com/cse110team24/walkwalkrevolution/firebase/messaging/package-summary.html">com.cse110team24.walkwalkrevolution.firebase.messaging</a> - package com.cse110team24.walkwalkrevolution.firebase.messaging</dt>
<dd> </dd>
<dt><a href="../com/cse110team24/walkwalkrevolution/fitness/package-summary.html">com.cse110team24.walkwalkrevolution.fitness</a> - package com.cse110team24.walkwalkrevolution.fitness</dt>
<dd> </dd>
<dt><a href="../com/cse110team24/walkwalkrevolution/mockedservices/package-summary.html">com.cse110team24.walkwalkrevolution.mockedservices</a> - package com.cse110team24.walkwalkrevolution.mockedservices</dt>
<dd> </dd>
<dt><a href="../com/cse110team24/walkwalkrevolution/models/invitation/package-summary.html">com.cse110team24.walkwalkrevolution.models.invitation</a> - package com.cse110team24.walkwalkrevolution.models.invitation</dt>
<dd> </dd>
<dt><a href="../com/cse110team24/walkwalkrevolution/models/route/package-summary.html">com.cse110team24.walkwalkrevolution.models.route</a> - package com.cse110team24.walkwalkrevolution.models.route</dt>
<dd> </dd>
<dt><a href="../com/cse110team24/walkwalkrevolution/models/team/package-summary.html">com.cse110team24.walkwalkrevolution.models.team</a> - package com.cse110team24.walkwalkrevolution.models.team</dt>
<dd> </dd>
<dt><a href="../com/cse110team24/walkwalkrevolution/models/user/package-summary.html">com.cse110team24.walkwalkrevolution.models.user</a> - package com.cse110team24.walkwalkrevolution.models.user</dt>
<dd> </dd>
<dt><a href="../com/cse110team24/walkwalkrevolution/utils/package-summary.html">com.cse110team24.walkwalkrevolution.utils</a> - package com.cse110team24.walkwalkrevolution.utils</dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/models/route/Route.html#compareTo-com.cse110team24.walkwalkrevolution.models.route.Route-">compareTo(Route)</a></span> - Method in class com.cse110team24.walkwalkrevolution.models.route.<a href="../com/cse110team24/walkwalkrevolution/models/route/Route.html" title="class in com.cse110team24.walkwalkrevolution.models.route">Route</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/activities/teams/TeamActivity.html#context">context</a></span> - Variable in class com.cse110team24.walkwalkrevolution.activities.teams.<a href="../com/cse110team24/walkwalkrevolution/activities/teams/TeamActivity.html" title="class in com.cse110team24.walkwalkrevolution.activities.teams">TeamActivity</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/fitness/FitnessServiceFactory.BluePrint.html#create-com.cse110team24.walkwalkrevolution.HomeActivity-">create(HomeActivity)</a></span> - Method in interface com.cse110team24.walkwalkrevolution.fitness.<a href="../com/cse110team24/walkwalkrevolution/fitness/FitnessServiceFactory.BluePrint.html" title="interface in com.cse110team24.walkwalkrevolution.fitness">FitnessServiceFactory.BluePrint</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/fitness/FitnessServiceFactory.html#create-java.lang.String-com.cse110team24.walkwalkrevolution.HomeActivity-">create(String, HomeActivity)</a></span> - Static method in class com.cse110team24.walkwalkrevolution.fitness.<a href="../com/cse110team24/walkwalkrevolution/fitness/FitnessServiceFactory.html" title="class in com.cse110team24.walkwalkrevolution.fitness">FitnessServiceFactory</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/firebase/auth/AuthFactory.html#createAuthService--">createAuthService()</a></span> - Method in interface com.cse110team24.walkwalkrevolution.firebase.auth.<a href="../com/cse110team24/walkwalkrevolution/firebase/auth/AuthFactory.html" title="interface in com.cse110team24.walkwalkrevolution.firebase.auth">AuthFactory</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/firebase/auth/FirebaseAuthFactory.html#createAuthService--">createAuthService()</a></span> - Method in class com.cse110team24.walkwalkrevolution.firebase.auth.<a href="../com/cse110team24/walkwalkrevolution/firebase/auth/FirebaseAuthFactory.html" title="class in com.cse110team24.walkwalkrevolution.firebase.auth">FirebaseAuthFactory</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/mockedservices/TestAuth.TestAuthFactory.html#createAuthService--">createAuthService()</a></span> - Method in class com.cse110team24.walkwalkrevolution.mockedservices.<a href="../com/cse110team24/walkwalkrevolution/mockedservices/TestAuth.TestAuthFactory.html" title="class in com.cse110team24.walkwalkrevolution.mockedservices">TestAuth.TestAuthFactory</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/firebase/firestore/DatabaseServiceFactory.html#createDatabaseService-com.cse110team24.walkwalkrevolution.firebase.firestore.services.DatabaseService.Service-">createDatabaseService(DatabaseService.Service)</a></span> - Method in interface com.cse110team24.walkwalkrevolution.firebase.firestore.<a href="../com/cse110team24/walkwalkrevolution/firebase/firestore/DatabaseServiceFactory.html" title="interface in com.cse110team24.walkwalkrevolution.firebase.firestore">DatabaseServiceFactory</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/firebase/firestore/FirestoreDatabaseServiceFactory.html#createDatabaseService-com.cse110team24.walkwalkrevolution.firebase.firestore.services.DatabaseService.Service-">createDatabaseService(DatabaseService.Service)</a></span> - Method in class com.cse110team24.walkwalkrevolution.firebase.firestore.<a href="../com/cse110team24/walkwalkrevolution/firebase/firestore/FirestoreDatabaseServiceFactory.html" title="class in com.cse110team24.walkwalkrevolution.firebase.firestore">FirestoreDatabaseServiceFactory</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/mockedservices/TestDatabaseServiceFactory.html#createDatabaseService-com.cse110team24.walkwalkrevolution.firebase.firestore.services.DatabaseService.Service-">createDatabaseService(DatabaseService.Service)</a></span> - Method in class com.cse110team24.walkwalkrevolution.mockedservices.<a href="../com/cse110team24/walkwalkrevolution/mockedservices/TestDatabaseServiceFactory.html" title="class in com.cse110team24.walkwalkrevolution.mockedservices">TestDatabaseServiceFactory</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/firebase/messaging/FirebaseMessagingFactory.html#createMessagingService-android.app.Activity-com.cse110team24.walkwalkrevolution.firebase.firestore.services.DatabaseService-">createMessagingService(Activity, DatabaseService)</a></span> - Method in class com.cse110team24.walkwalkrevolution.firebase.messaging.<a href="../com/cse110team24/walkwalkrevolution/firebase/messaging/FirebaseMessagingFactory.html" title="class in com.cse110team24.walkwalkrevolution.firebase.messaging">FirebaseMessagingFactory</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/firebase/messaging/MessagingFactory.html#createMessagingService-android.app.Activity-com.cse110team24.walkwalkrevolution.firebase.firestore.services.DatabaseService-">createMessagingService(Activity, DatabaseService)</a></span> - Method in interface com.cse110team24.walkwalkrevolution.firebase.messaging.<a href="../com/cse110team24/walkwalkrevolution/firebase/messaging/MessagingFactory.html" title="interface in com.cse110team24.walkwalkrevolution.firebase.messaging">MessagingFactory</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/mockedservices/TestMessage.TestMessagingFactory.html#createMessagingService-android.app.Activity-com.cse110team24.walkwalkrevolution.firebase.firestore.services.DatabaseService-">createMessagingService(Activity, DatabaseService)</a></span> - Method in class com.cse110team24.walkwalkrevolution.mockedservices.<a href="../com/cse110team24/walkwalkrevolution/mockedservices/TestMessage.TestMessagingFactory.html" title="class in com.cse110team24.walkwalkrevolution.mockedservices">TestMessage.TestMessagingFactory</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/firebase/firestore/adapters/FireBaseFireStoreAdapterTeams.html#createTeamInDatabase-com.cse110team24.walkwalkrevolution.models.user.IUser-">createTeamInDatabase(IUser)</a></span> - Method in class com.cse110team24.walkwalkrevolution.firebase.firestore.adapters.<a href="../com/cse110team24/walkwalkrevolution/firebase/firestore/adapters/FireBaseFireStoreAdapterTeams.html" title="class in com.cse110team24.walkwalkrevolution.firebase.firestore.adapters">FireBaseFireStoreAdapterTeams</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/firebase/firestore/services/TeamsDatabaseService.html#createTeamInDatabase-com.cse110team24.walkwalkrevolution.models.user.IUser-">createTeamInDatabase(IUser)</a></span> - Method in interface com.cse110team24.walkwalkrevolution.firebase.firestore.services.<a href="../com/cse110team24/walkwalkrevolution/firebase/firestore/services/TeamsDatabaseService.html" title="interface in com.cse110team24.walkwalkrevolution.firebase.firestore.services">TeamsDatabaseService</a></dt>
<dd>
<div class="block">Create a team document in this service's provider database.</div>
</dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/mockedservices/TestTeamsDatabaseService.html#createTeamInDatabase-com.cse110team24.walkwalkrevolution.models.user.IUser-">createTeamInDatabase(IUser)</a></span> - Method in class com.cse110team24.walkwalkrevolution.mockedservices.<a href="../com/cse110team24/walkwalkrevolution/mockedservices/TestTeamsDatabaseService.html" title="class in com.cse110team24.walkwalkrevolution.mockedservices">TestTeamsDatabaseService</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/firebase/firestore/adapters/FirebaseFirestoreAdapterUsers.html#createUserInDatabase-com.cse110team24.walkwalkrevolution.models.user.IUser-">createUserInDatabase(IUser)</a></span> - Method in class com.cse110team24.walkwalkrevolution.firebase.firestore.adapters.<a href="../com/cse110team24/walkwalkrevolution/firebase/firestore/adapters/FirebaseFirestoreAdapterUsers.html" title="class in com.cse110team24.walkwalkrevolution.firebase.firestore.adapters">FirebaseFirestoreAdapterUsers</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/firebase/firestore/services/UsersDatabaseService.html#createUserInDatabase-com.cse110team24.walkwalkrevolution.models.user.IUser-">createUserInDatabase(IUser)</a></span> - Method in interface com.cse110team24.walkwalkrevolution.firebase.firestore.services.<a href="../com/cse110team24/walkwalkrevolution/firebase/firestore/services/UsersDatabaseService.html" title="interface in com.cse110team24.walkwalkrevolution.firebase.firestore.services">UsersDatabaseService</a></dt>
<dd>
<div class="block">Create a user document in this service's provider database.</div>
</dd>
<dt><span class="memberNameLink"><a href="../com/cse110team24/walkwalkrevolution/mockedservices/TestUsersDatabaseService.html#createUserInDatabase-com.cse110team24.walkwalkrevolution.models.user.IUser-">createUserInDatabase(IUser)</a></span> - Method in class com.cse110team24.walkwalkrevolution.mockedservices.<a href="../com/cse110team24/walkwalkrevolution/mockedservices/TestUsersDatabaseService.html" title="class in com.cse110team24.walkwalkrevolution.mockedservices">TestUsersDatabaseService</a></dt>
<dd> </dd>
</dl>
<a href="index-1.html">A</a> <a href="index-2.html">B</a> <a href="index-3.html">C</a> <a href="index-4.html">D</a> <a href="index-5.html">E</a> <a href="index-6.html">F</a> <a href="index-7.html">G</a> <a href="index-8.html">H</a> <a href="index-9.html">I</a> <a href="index-10.html">L</a> <a href="index-11.html">M</a> <a href="index-12.html">N</a> <a href="index-13.html">O</a> <a href="index-14.html">P</a> <a href="index-15.html">R</a> <a href="index-16.html">S</a> <a href="index-17.html">T</a> <a href="index-18.html">U</a> <a href="index-19.html">V</a> <a href="index-20.html">W</a> </div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar.bottom">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.bottom.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../overview-summary.html">Overview</a></li>
<li>Package</li>
<li>Class</li>
<li><a href="../overview-tree.html">Tree</a></li>
<li><a href="../deprecated-list.html">Deprecated</a></li>
<li class="navBarCell1Rev">Index</li>
<li><a href="../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="index-2.html">Prev Letter</a></li>
<li><a href="index-4.html">Next Letter</a></li>
</ul>
<ul class="navList">
<li><a href="../index.html?index-files/index-3.html" target="_top">Frames</a></li>
<li><a href="index-3.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
</body>
</html>
<file_sep>/app/src/main/java/com/cse110team24/walkwalkrevolution/firebase/firestore/observers/teams/TeamsTeammatesObserver.java
package com.cse110team24.walkwalkrevolution.firebase.firestore.observers.teams;
import com.cse110team24.walkwalkrevolution.firebase.firestore.observers.teams.TeamsDatabaseServiceObserver;
import com.cse110team24.walkwalkrevolution.models.team.ITeam;
/**
* Listens only for changes to the team.
*/
public interface TeamsTeammatesObserver extends TeamsDatabaseServiceObserver {
/**
* Called by the TeamsDatabaseServiceSubject this observer is observing when the requested teammates
* list data is ready to be read.
* <p>See also: {@link com.cse110team24.walkwalkrevolution.firebase.firestore.subjects.TeamsDatabaseServiceSubject#notifyObserversTeamRetrieved(ITeam)}</p>
* @param team the team that was retrieved. Does not include the currently signed in user.
*/
void onTeamRetrieved(ITeam team);
}
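// Minimal illustrative implementation (added for illustration, not original source). The class
// name is hypothetical; it assumes the observer registers itself with a TeamsDatabaseService the
// same way ScheduledProposedWalkActivity does elsewhere in this project.
//
//   public class TeamRosterScreen implements TeamsTeammatesObserver {
//       private ITeam mTeam;
//
//       @Override
//       public void onTeamRetrieved(ITeam team) {
//           mTeam = team;   // the currently signed-in user is not included in this team object
//           // refresh whatever view displays the teammates
//       }
//   }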
<file_sep>/app/src/androidTest/java/com/cse110team24/walkwalkrevolution/mockedservices/TestFitnessService.java
package com.cse110team24.walkwalkrevolution.mockedservices;
import android.app.Activity;
import android.content.Context;
import androidx.test.rule.ActivityTestRule;
import com.cse110team24.walkwalkrevolution.HomeActivity;
import com.cse110team24.walkwalkrevolution.fitness.FitnessService;
import com.cse110team24.walkwalkrevolution.fitness.GoogleFitAdapter;
/**
* inject this service like this inside the @setup method:
* FitnessServiceFactory.put(TEST_SERVICE, activity -> new TestFitnessService(activity));
*/
public class TestFitnessService implements FitnessService {
public static final String TAG = "[TestFitnessService]: ";
public static final String TEST_SERVICE_KEY = "TEST_SERVICE";
public HomeActivity activity;
// set these whenever you need to
public static long nextStepCount;
public static long startTime;
public static long endTime;
public static long toAdd;
public static long beforeRecordingSteps;
public static long endRecordingSteps;
public static long timeElapsedMillis;
public TestFitnessService(HomeActivity activity) {
this.activity = activity;
}
@Override
public int getRequestCode() {
return 0;
}
@Override
public void setup() {
System.out.println(TAG + "setup");
}
@Override
public void updateDailyStepCount() {
System.out.println(TAG + "updateStepCount");
activity.setDailyStats(nextStepCount);
}
@Override
public void startRecording() {
}
@Override
public void stopRecording() {
activity.setLatestWalkStats(endRecordingSteps, timeElapsedMillis);
}
@Override
public double getDistanceFromHeight(long steps, int heightFeet, float heightRemainderInches) {
return new GoogleFitAdapter(null).getDistanceFromHeight(steps, heightFeet, heightRemainderInches);
}
@Override
public void setStartRecordingTime(long startTime) {
// intentionally a no-op in this mock; tests assign the public static fields above directly
}
@Override
public void setEndRecordingTime(long endTime) {
}
@Override
public void setStepsToAdd(long stepsToAdd) {
}
}
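// Typical way to drive this mock from a UI test (added, not original source): assign the public
// static fields before interacting with the activity, e.g.
//
//   TestFitnessService.nextStepCount = 1000;        // steps HomeActivity will show
//   TestFitnessService.endRecordingSteps = 500;     // steps reported when a walk stops
//   TestFitnessService.timeElapsedMillis = 60_000;  // duration reported when a walk stops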
<file_sep>/app/src/main/java/com/cse110team24/walkwalkrevolution/firebase/firestore/adapters/FirebaseFirestoreAdapter.java
package com.cse110team24.walkwalkrevolution.firebase.firestore.adapters;
import com.cse110team24.walkwalkrevolution.firebase.firestore.services.DatabaseService;
/** TODO: 2/28/20 flow for a team should be:
 * get the user's account (sign in or up if necessary)
 * check whether the user already has a team
 * if yes, update the UI
 *
 * if not, when the user sends an invite:
 * instantiate a team,
 * add the user as a member of the team,
 * create the team in the database,
 * set the team's new UID as the teamUid for the user
 */
// TODO: 3/3/20 split this adapter into three specialized service adapters
public class FirebaseFirestoreAdapter implements DatabaseService {
private static final String TAG = "WWR_FirebaseFirestoreAdapter";
public static final String ROUTES_COLLECTION_KEY = "routes";
public static final String TEAM_ID_KEY = "teamUid";
public FirebaseFirestoreAdapter() {
}
}
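// Rough sketch (added for illustration, not original source) of the invite flow described in the
// TODO above. Only createTeamInDatabase(IUser) is a method confirmed elsewhere in this codebase;
// the remaining steps are paraphrased because their exact APIs are not visible in this file.
//
//   // when the user sends an invite and has no teamUid yet:
//   //   1. build an ITeam and add the current IUser as a member
//   //   2. teamsDatabaseService.createTeamInDatabase(currentUser);
//   //   3. store the newly created team's UID back on the user's document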
<file_sep>/app/src/main/java/com/cse110team24/walkwalkrevolution/activities/teams/ScheduledProposedWalkActivity.java
package com.cse110team24.walkwalkrevolution.activities.teams;
import androidx.appcompat.app.AppCompatActivity;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.graphics.Color;
import android.net.Uri;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;
import com.cse110team24.walkwalkrevolution.HomeActivity;
import com.cse110team24.walkwalkrevolution.R;
import com.cse110team24.walkwalkrevolution.application.FirebaseApplicationWWR;
import com.cse110team24.walkwalkrevolution.firebase.firestore.observers.teams.TeamsTeamStatusesObserver;
import com.cse110team24.walkwalkrevolution.firebase.firestore.observers.teams.TeamsTeamWalksObserver;
import com.cse110team24.walkwalkrevolution.firebase.firestore.observers.teams.TeamsTeammatesObserver;
import com.cse110team24.walkwalkrevolution.firebase.firestore.services.DatabaseService;
import com.cse110team24.walkwalkrevolution.firebase.firestore.services.TeamsDatabaseService;
import com.cse110team24.walkwalkrevolution.firebase.firestore.services.UsersDatabaseService;
import com.cse110team24.walkwalkrevolution.models.route.Route;
import com.cse110team24.walkwalkrevolution.models.team.ITeam;
import com.cse110team24.walkwalkrevolution.models.team.walk.TeamWalk;
import com.cse110team24.walkwalkrevolution.models.team.walk.TeamWalkStatus;
import com.cse110team24.walkwalkrevolution.models.team.walk.TeammateStatus;
import com.cse110team24.walkwalkrevolution.models.user.FirebaseUserAdapter;
import com.cse110team24.walkwalkrevolution.models.user.IUser;
import com.cse110team24.walkwalkrevolution.utils.Utils;
import java.util.ArrayList;
import java.util.List;
import java.util.SortedMap;
public class ScheduledProposedWalkActivity extends AppCompatActivity implements TeamsTeamWalksObserver, TeamsTeamStatusesObserver {
private static final String TAG = "WWR_ScheduledProposedWalkActivity";
private Button acceptBtn;
private Button declineCannotMakeItBtn;
private Button declineNotInterestedBtn;
private Button scheduleWalkBtn;
private Button withdrawCancelBtn;
private ListView teammateStatusList;
private TeammatesListViewAdapter statusListAdapter;
private TeamsDatabaseService mDb;
private TeamWalk mCurrentTeamWalk;
private IUser mCurrentUser;
private TeammateStatus mCurrentUserStatus;
SharedPreferences mPreferences;
private String mTeamUid;
private SharedPreferences preferences;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_scheduled_proposed_walk);
preferences = getSharedPreferences(HomeActivity.APP_PREF, Context.MODE_PRIVATE);
setUpServices();
getCurrentUser();
}
@Override
protected void onResume() {
super.onResume();
mDb.getLatestTeamWalksDescendingOrder(mTeamUid, 1);
}
private void setUpServices() {
mDb = (TeamsDatabaseService) FirebaseApplicationWWR.getDatabaseServiceFactory().createDatabaseService(DatabaseService.Service.TEAMS);
mDb.register(this);
}
private void getTeamUid() {
mPreferences = getSharedPreferences(HomeActivity.APP_PREF, Context.MODE_PRIVATE);
mTeamUid = mPreferences.getString(IUser.TEAM_UID_KEY, null);
Log.d(TAG, "getTeamUid: team uid found, retrieving team");
}
private void getCurrentUser() {
getTeamUid();
mCurrentUser = FirebaseUserAdapter.builder()
.addDisplayName(mPreferences.getString(IUser.USER_NAME_KEY, ""))
.addTeamUid(mTeamUid)
.addEmail(mPreferences.getString(IUser.EMAIL_KEY, ""))
.build();
}
@Override
public void onTeamWalksRetrieved(List<TeamWalk> teamWalks) {
if(teamWalks.size() == 0) {
findViewById(R.id.no_proposed_or_scheduled_walks_tv).setVisibility(View.VISIBLE);
return;
}
mCurrentTeamWalk = teamWalks.get(0);
displayWalkStatusUIViews();
if (walkNotCancelledOrWithdrawn()) {
findViewById(R.id.tv_status_prompt).setVisibility(View.VISIBLE);
mCurrentTeamWalk.setTeamUid(mTeamUid);
mDb.getTeammateStatusesForTeamWalk(mCurrentTeamWalk, mTeamUid);
displayAppropriateUIViewsForUser();
}
}
private void displayWalkStatusUIViews() {
TextView walkStatusTv = findViewById(R.id.schedule_propose_tv_walk_status);
setStatusText(walkStatusTv);
if (!walkNotCancelledOrWithdrawn()) {
walkStatusTv.setTextColor(getColor(android.R.color.holo_red_dark));
}
walkStatusTv.setVisibility(View.VISIBLE);
}
private void setStatusText(TextView walkStatusTv) {
switch (mCurrentTeamWalk.getStatus()) {
case SCHEDULED:
walkStatusTv.setText(R.string.status_scheduled);
break;
case WITHDRAWN:
walkStatusTv.setText(R.string.status_withdrawn);
break;
case CANCELLED:
walkStatusTv.setText(R.string.status_cancelled);
break;
default:
walkStatusTv.setText(R.string.status_proposed);
}
}
private void displayAppropriateUIViewsForUser() {
displayCommonUIViews();
// current user proposed the walk
if (mCurrentTeamWalk.getProposedBy().equals(mCurrentUser.getDisplayName())) {
Log.i(TAG, "displayAppropriateUIViewsForUser: user proposed route");
displayProposerUIViews();
} else {
Log.i(TAG, "displayAppropriateUIViewsForUser: user was invited to route");
displayTeammateUIViews();
}
}
private void displayProposerUIViews() {
// TODO: 3/11/20 check status and change icon and text of cancel/withdraw button
findViewById(R.id.schedule_propose_linear_layout_decision_buttons).setVisibility(View.VISIBLE);
addClickListenersProposerButtons();
setCancelWithdrawBtnIconAndText();
}
private void setCancelWithdrawBtnIconAndText() {
if (mCurrentTeamWalk.getStatus() == TeamWalkStatus.SCHEDULED) {
withdrawCancelBtn.setText(R.string.cancel);
scheduleWalkBtn.setVisibility(View.GONE);
withdrawCancelBtn.setCompoundDrawablesRelativeWithIntrinsicBounds(R.drawable.ic_delete_forever_red_24dp, 0, 0, 0);
} else {
withdrawCancelBtn.setText(R.string.withdraw);
withdrawCancelBtn.setCompoundDrawablesRelativeWithIntrinsicBounds(R.drawable.ic_event_busy_black_24dp, 0, 0, 0);
}
}
private void addClickListenersProposerButtons() {
scheduleWalkBtn = findViewById(R.id.schedule_propose_btn_schedule);
scheduleWalkBtn.setOnClickListener(v -> scheduleWalkBtnClickListener());
withdrawCancelBtn = findViewById(R.id.schedule_propose_btn_withdraw);
withdrawCancelBtn.setOnClickListener(v -> withdrawCancelBtnClickListener());
}
private void scheduleWalkBtnClickListener() {
mCurrentTeamWalk.setStatus(TeamWalkStatus.SCHEDULED);
setCancelWithdrawBtnIconAndText();
mDb.updateCurrentTeamWalk(mCurrentTeamWalk);
displayWalkStatusUIViews();
}
private void withdrawCancelBtnClickListener() {
if(walkNotCancelledOrWithdrawn()) {
if (withdrawCancelBtn.getText().toString().equals(getString(R.string.cancel))) {
mCurrentTeamWalk.setStatus(TeamWalkStatus.CANCELLED);
} else {
mCurrentTeamWalk.setStatus(TeamWalkStatus.WITHDRAWN);
}
mDb.updateCurrentTeamWalk(mCurrentTeamWalk);
}
if (walkCancelledOrWithdrawn()) {
findViewById(R.id.schedule_propose_linear_layout_decision_buttons).setVisibility(View.GONE);
}
displayWalkStatusUIViews();
}
private void displayTeammateUIViews() {
findViewById(R.id.schedule_propose_linear_layout_status_buttons).setVisibility(View.VISIBLE);
addClickListenersTeammateButtons();
displayProposedByViews();
}
private void addClickListenersTeammateButtons() {
mCurrentUserStatus = TeammateStatus.get(mPreferences.getString(IUser.STATUS_TEAM_WALK, ""));
acceptBtn = findViewById(R.id.schedule_propose_btn_accept);
acceptBtn.setEnabled(true);
acceptBtn.setOnClickListener(v -> updateStatus(TeammateStatus.ACCEPTED));
declineCannotMakeItBtn = findViewById(R.id.schedule_propose_btn_decline_cant_come);
declineCannotMakeItBtn.setEnabled(true);
declineCannotMakeItBtn.setOnClickListener(v -> updateStatus(TeammateStatus.DECLINED_SCHEDULING_CONFLICT));
declineNotInterestedBtn = findViewById(R.id.schedule_propose_btn_decline_not_interested);
declineNotInterestedBtn.setEnabled(true);
declineNotInterestedBtn.setOnClickListener(v -> updateStatus(TeammateStatus.DECLINED_NOT_INTERESTED));
highLightCurrentStatusButton();
}
private void highLightCurrentStatusButton() {
String latestWalkUserAcceptedOrDeclined = preferences.getString("latestTeamWalk", "");
if(mCurrentUserStatus == null || !latestWalkUserAcceptedOrDeclined.equals(mCurrentTeamWalk.getWalkUid()))
return;
acceptBtn.setTextColor(getColor(R.color.colorBlack));
declineNotInterestedBtn.setTextColor(getColor(R.color.colorBlack));
declineCannotMakeItBtn.setTextColor(getColor(R.color.colorBlack));
switch (mCurrentUserStatus) {
case ACCEPTED:
acceptBtn.setTextColor(getColor(R.color.colorAccent));
break;
case DECLINED_NOT_INTERESTED:
declineNotInterestedBtn.setTextColor(getColor(R.color.colorAccent));
break;
case DECLINED_SCHEDULING_CONFLICT:
declineCannotMakeItBtn.setTextColor(getColor(R.color.colorAccent));
break;
}
}
// updates the teammate's status for the latest walk locally and in the database
private void updateStatus(TeammateStatus newStatus) {
mCurrentUserStatus = TeammateStatus.get(mPreferences.getString(IUser.STATUS_TEAM_WALK, ""));
if (mCurrentUserStatus == newStatus) {
Log.d(TAG, "updateStatus: current status was equal to newStatus " + newStatus);
Utils.showToast(this, "Please pick a new status", Toast.LENGTH_SHORT);
} else {
Log.d(TAG, "updateStatus: updated user status to " + newStatus);
mPreferences.edit()
.putString(IUser.STATUS_TEAM_WALK, newStatus.getReason())
.putString("latestTeamWalk", mCurrentTeamWalk.getWalkUid())
.apply();
mDb.changeTeammateStatusForLatestWalk(mCurrentUser, mCurrentTeamWalk, newStatus);
mCurrentUserStatus = newStatus;
highLightCurrentStatusButton();
}
}
// not for proposer
private void displayProposedByViews() {
findViewById(R.id.schedule_propose_tv_proposed_by_prompt).setVisibility(View.VISIBLE);
TextView proposedByDisplayTv = findViewById(R.id.schedule_propose_tv_proposed_by_display);
proposedByDisplayTv.setVisibility(View.VISIBLE);
proposedByDisplayTv.setText(mCurrentTeamWalk.getProposedBy());
}
// for everyone
private void displayCommonUIViews() {
Route proposedRoute = mCurrentTeamWalk.getProposedRoute();
displayRouteDetails(proposedRoute);
displayProposedDateAndTime();
}
private void displayRouteDetails(Route proposedRoute) {
displayRouteName(proposedRoute);
displayRouteStartingLocation(proposedRoute);
}
private void displayRouteName(Route proposedRoute) {
findViewById(R.id.schedule_propose_tv_walk_name_prompt).setVisibility(View.VISIBLE);
TextView walkName = findViewById(R.id.schedule_propose_tv_walk_name_display);
walkName.setVisibility(View.VISIBLE);
walkName.setText(proposedRoute.getTitle());
}
private void displayRouteStartingLocation(Route proposedRoute) {
findViewById(R.id.schedule_propose_tv_starting_loc).setVisibility(View.VISIBLE);
TextView startingLocationTv = findViewById(R.id.schedule_propose_tv_starting_loc_display);
startingLocationTv.setVisibility(View.VISIBLE);
startingLocationTv.setText(proposedRoute.getStartingLocation());
onClickListenerLaunchGoogleMaps(startingLocationTv);
}
private void onClickListenerLaunchGoogleMaps(final TextView startingLocationTv) {
startingLocationTv.setEnabled(true);
startingLocationTv.setOnClickListener( v -> {
String locationText = startingLocationTv.getText().toString();
if(!locationText.isEmpty()) {
launchGoogleMaps();
}
});
}
private void launchGoogleMaps() {
String startingLocation = mCurrentTeamWalk.getProposedRoute().getStartingLocation();
String map = "http://maps.google.co.in/maps?q=" + startingLocation;
Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse(map));
startActivity(intent);
}
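// Note (added, not original source): if the starting location can contain spaces or special
// characters, the query part should be encoded before building the Uri, e.g.
//   Uri.parse("http://maps.google.co.in/maps?q=" + Uri.encode(startingLocation));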
private void displayProposedDateAndTime() {
findViewById(R.id.schedule_propose_tv_walk_date).setVisibility(View.VISIBLE);
TextView walkDate = findViewById(R.id.schedule_propose_tv_walk_date_display);
walkDate.setVisibility(View.VISIBLE);
Log.d(TAG, "displayProposedDateAndTime: date: " + mCurrentTeamWalk.getProposedDateAndTime().toDate());
String formattedDateAndTime = Utils.formatDateIntoReadableString("MM/dd/yyyy 'at' hh:mm a", mCurrentTeamWalk.getProposedDateAndTime().toDate(), false);
walkDate.setText(formattedDateAndTime);
}
@Override
public void onTeamWalkStatusesRetrieved(SortedMap<String, String> statusData) {
List<IUser> teammates = new ArrayList<>();
statusData.forEach((displayName, statusString) -> {
if (!displayName.equals(mCurrentUser.getDisplayName()) && !displayName.equals(mCurrentTeamWalk.getProposedBy())) {
IUser teammate = FirebaseUserAdapter.builder()
.addDisplayName(displayName)
.addLatestWalkStatus(TeammateStatus.get(statusString))
.build();
teammates.add(teammate);
}
});
teammateStatusList = findViewById(R.id.list_members_with_status);
statusListAdapter = new TeammatesListViewAdapter(this, teammates, preferences);
teammateStatusList.setAdapter(statusListAdapter);
statusListAdapter.setShowStatusIcons(true);
teammateStatusList.setNestedScrollingEnabled(true);
}
private boolean walkNotCancelledOrWithdrawn() {
return mCurrentTeamWalk.getStatus() != TeamWalkStatus.CANCELLED && mCurrentTeamWalk.getStatus() != TeamWalkStatus.WITHDRAWN;
}
private boolean walkCancelledOrWithdrawn() {
return !walkNotCancelledOrWithdrawn();
}
}
<file_sep>/app/src/main/java/com/cse110team24/walkwalkrevolution/fitness/FitnessService.java
package com.cse110team24.walkwalkrevolution.fitness;
public interface FitnessService {
int getRequestCode();
void setup();
void updateDailyStepCount();
void startRecording();
void stopRecording();
double getDistanceFromHeight(long steps, int heightFeet, float heightRemainderInches);
void setStartRecordingTime(long startTime);
void setEndRecordingTime(long endTime);
void setStepsToAdd(long stepsToAdd);
}
<file_sep>/app/src/main/java/com/cse110team24/walkwalkrevolution/utils/Builder.java
package com.cse110team24.walkwalkrevolution.utils;
public interface Builder<TBuildable> {
TBuildable build();
}
<file_sep>/app/src/androidTest/java/com/cse110team24/walkwalkrevolution/LoginActivityUIEspressoTest.java
package com.cse110team24.walkwalkrevolution;
import androidx.test.espresso.ViewInteraction;
import androidx.test.filters.LargeTest;
import androidx.test.rule.ActivityTestRule;
import androidx.test.runner.AndroidJUnit4;
import com.cse110team24.walkwalkrevolution.mockedservices.MockActivityTestRule;
import com.cse110team24.walkwalkrevolution.mockedservices.TestAuth;
import com.cse110team24.walkwalkrevolution.mockedservices.TestUsersDatabaseService;
import com.cse110team24.walkwalkrevolution.models.user.FirebaseUserAdapter;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import static androidx.test.espresso.Espresso.onData;
import static androidx.test.espresso.Espresso.onView;
import static androidx.test.espresso.assertion.ViewAssertions.matches;
import static androidx.test.espresso.matcher.ViewMatchers.isDisplayed;
import static androidx.test.espresso.matcher.ViewMatchers.withId;
import static androidx.test.espresso.matcher.ViewMatchers.withText;
import static org.hamcrest.Matchers.allOf;
@LargeTest
@RunWith(AndroidJUnit4.class)
public class LoginActivityUIEspressoTest {
@Rule
public MockActivityTestRule<LoginActivity> mActivityTestRule = new MockActivityTestRule<>(LoginActivity.class);
@Before
public void setup() {
// TODO: you're mocking what the result of each of these will be
TestAuth.isTestUserSignedIn = false;
TestAuth.successUserSignedIn = true;
TestAuth.testAuthUser = FirebaseUserAdapter.builder()
.addDisplayName("Test")
.addEmail("<EMAIL>")
.addTeamUid("666")
.addUid("1")
.build();
}
// TODO: 3/5/20 apparently assertions for EditText types suck. They will most likely fail.
@Test
public void loginActivityUIEspressoTest() {
ViewInteraction button = onView(
allOf(withId(R.id.btn_height_finish), isDisplayed()));
button.check(matches(isDisplayed()));
ViewInteraction textView = onView(
allOf(withId(R.id.sign_up_tv), withText("Don't have an account? Sign up here"), isDisplayed()));
textView.check(matches(withText("Don't have an account? Sign up here")));
}
}
<file_sep>/app/src/test/java/com/cse110team24/walkwalkrevolution/HomeActivityUnitTest.java
package com.cse110team24.walkwalkrevolution;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;
import androidx.test.core.app.ActivityScenario;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.cse110team24.walkwalkrevolution.firebase.firestore.services.DatabaseService;
import com.cse110team24.walkwalkrevolution.fitness.FitnessService;
import com.cse110team24.walkwalkrevolution.fitness.FitnessServiceFactory;
import com.cse110team24.walkwalkrevolution.fitness.GoogleFitAdapter;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.robolectric.shadows.ShadowToast;
import static junit.framework.TestCase.assertEquals;
import static junit.framework.TestCase.assertTrue;
@RunWith(AndroidJUnit4.class)
public class HomeActivityUnitTest extends TestInjection {
private static final String TEST_SERVICE = "TEST_SERVICE";
private static final int FEET = 5;
private static final float INCHES = 3f;
private static final String TOAST_MESSAGE = "Remember to set an end time for your walk!";
private TextView stepsTv;
private TextView distanceTv;
private TextView latestStepsTv;
private TextView latestDistanceTv;
private TextView latestTimeTv;
private TextView noWalkTodayTv;
private Button startButton;
private Button stopButton;
private Intent intent;
private long nextStepCount;
private long timeElapsed;
@Before
public void setup() {
super.setup();
FitnessServiceFactory.put(TEST_SERVICE, TestFitnessService::new);
intent = new Intent(ApplicationProvider.getApplicationContext(), HomeActivity.class)
.putExtra(HomeActivity.FITNESS_SERVICE_KEY, TEST_SERVICE)
.putExtra(HomeActivity.HEIGHT_FT_KEY, FEET)
.putExtra(HomeActivity.HEIGHT_IN_KEY, INCHES);
Mockito.when(dsf.createDatabaseService(DatabaseService.Service.USERS)).thenReturn(usersDatabaseService);
timeElapsed = 90_000;
}
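// Note (added): dsf and usersDatabaseService are assumed to be Mockito mocks provided by the
// TestInjection base class (not shown in this file); the stub above makes the factory hand the
// mocked UsersDatabaseService to HomeActivity.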
@Test
public void testHeightSaved() {
ActivityScenario<HomeActivity> scenario = ActivityScenario.launch(intent);
scenario.onActivity(activity -> {
SharedPreferences preferences = activity.getSharedPreferences(HomeActivity.APP_PREF, Context.MODE_PRIVATE);
assertEquals(5, preferences.getInt(HomeActivity.HEIGHT_FT_KEY, -1));
assertEquals(3, preferences.getFloat(HomeActivity.HEIGHT_IN_KEY, -1), 0.1);
});
}
@Test
public void testDailyStats() {
nextStepCount = 5842;
double expectedMiles = 2.39;
ActivityScenario<HomeActivity> scenario = ActivityScenario.launch(intent);
scenario.onActivity(activity -> {
getUIViews(activity);
activity.onActivityResult(0, Activity.RESULT_OK, null);
assertEquals(String.valueOf(nextStepCount), stepsTv.getText().toString());
assertEquals(expectedMiles, Double.valueOf(distanceTv.getText().toString()), 0.1);
});
}
@Test
public void testNoLatestWalk() {
nextStepCount = 5842;
ActivityScenario<HomeActivity> scenario = ActivityScenario.launch(intent);
scenario.onActivity(activity -> {
getUIViews(activity);
assertEquals(noWalkTodayTv.getVisibility(), View.VISIBLE);
assertTrue(latestDistanceTv.getText().toString().isEmpty());
assertTrue(latestStepsTv.getText().toString().isEmpty());
assertTrue(latestTimeTv.getText().toString().isEmpty());
});
}
@Test
public void testOnGoingLatestWalk() {
ActivityScenario<HomeActivity> scenario = ActivityScenario.launch(intent);
scenario.onActivity(activity -> {
getUIViews(activity);
assertEquals(startButton.getVisibility(), View.VISIBLE);
assertEquals(stopButton.getVisibility(), View.INVISIBLE);
startButton.performClick();
assertEquals(startButton.getVisibility(), View.INVISIBLE);
assertEquals(stopButton.getVisibility(), View.VISIBLE);
});
}
@Test
public void testCompletedWalk() {
nextStepCount = 5842;
double timeElapsedMinutes = 1.5;
double expectedMiles = 2.39;
ActivityScenario<HomeActivity> scenario = ActivityScenario.launch(intent);
scenario.onActivity(activity -> {
getUIViews(activity);
startButton.performClick();
stopButton.performClick();
assertEquals(startButton.getVisibility(), View.VISIBLE);
assertEquals(stopButton.getVisibility(), View.INVISIBLE);
assertEquals(5842, (int) Integer.valueOf(latestStepsTv.getText().toString()));
assertEquals(timeElapsedMinutes, Double.valueOf(latestTimeTv.getText().toString().split(" ")[0]), 0.1);
assertEquals(expectedMiles, Double.valueOf(latestDistanceTv.getText().toString().split(" ")[0]), 0.1);
});
}
@Test
public void testDisableStopWalkWithToast() {
ActivityScenario<HomeActivity> scenario = ActivityScenario.launch(intent);
scenario.onActivity(activity -> {
getUIViews(activity);
Intent mockIntent = getMockIntent();
activity.onActivityResult(MockActivity.REQUEST_CODE, Activity.RESULT_OK, mockIntent);
startButton.performClick();
assertEquals(ShadowToast.getTextOfLatestToast(), TOAST_MESSAGE);
assertEquals(startButton.getVisibility(), View.INVISIBLE);
assertEquals(stopButton.getVisibility(), View.VISIBLE);
stopButton.performClick();
assertEquals(startButton.getVisibility(), View.INVISIBLE);
assertEquals(stopButton.getVisibility(), View.VISIBLE);
assertEquals(ShadowToast.getTextOfLatestToast(), TOAST_MESSAGE);
});
}
@Test
public void testMockIncrementBeforeStarting() {
nextStepCount = 1500;
ActivityScenario<HomeActivity> scenario = ActivityScenario.launch(intent);
scenario.onActivity(activity -> {
getUIViews(activity);
Intent mockIntent = getMockIntent();
activity.onActivityResult(MockActivity.REQUEST_CODE, Activity.RESULT_OK, mockIntent);
assertEquals(1500, (int) Integer.valueOf(stepsTv.getText().toString()));
});
}
@Test
public void testMockedWalkStatsNoAddedSteps() {
nextStepCount = 1500;
ActivityScenario<HomeActivity> scenario = ActivityScenario.launch(intent);
scenario.onActivity(activity -> {
getUIViews(activity);
Intent mockIntent = getMockIntent();
activity.onActivityResult(MockActivity.REQUEST_CODE, Activity.RESULT_OK, mockIntent);
startButton.performClick();
nextStepCount = 0;
timeElapsed = 7_200_000;
Intent endWalkIntent = new Intent(ApplicationProvider.getApplicationContext(), MockActivity.class)
.putExtra(MockActivity.ADDED_STEPS_KEY, 0)
.putExtra(MockActivity.INPUT_TIME_KEY, "9:20:00")
.putExtra(MockActivity.SETTING_START_TIME_KEY, false);
activity.onActivityResult(MockActivity.REQUEST_CODE, Activity.RESULT_OK, endWalkIntent);
stopButton.performClick();
assertEquals("0", latestStepsTv.getText().toString());
assertEquals("0.00 mile(s)", latestDistanceTv.getText().toString());
assertEquals("120.00 min.", latestTimeTv.getText().toString());
});
}
@Test
public void testMockedWalkStatsAddedSteps() {
nextStepCount = 1500;
ActivityScenario<HomeActivity> scenario = ActivityScenario.launch(intent);
scenario.onActivity(activity -> {
getUIViews(activity);
Intent mockIntent = getMockIntent();
activity.onActivityResult(MockActivity.REQUEST_CODE, Activity.RESULT_OK, mockIntent);
startButton.performClick();
nextStepCount = 1500;
timeElapsed = 7_200_000;
Intent endWalkIntent = new Intent(ApplicationProvider.getApplicationContext(), MockActivity.class)
.putExtra(MockActivity.ADDED_STEPS_KEY, 1500)
.putExtra(MockActivity.INPUT_TIME_KEY, "9:20:00")
.putExtra(MockActivity.SETTING_START_TIME_KEY, false);
activity.onActivityResult(MockActivity.REQUEST_CODE, Activity.RESULT_OK, endWalkIntent);
stopButton.performClick();
assertEquals("1500", latestStepsTv.getText().toString());
assertEquals("0.62 mile(s)", latestDistanceTv.getText().toString());
assertEquals("120.00 min.", latestTimeTv.getText().toString());
});
}
@Test
public void testMockedWalkStatsNegativeWalkTime() {
nextStepCount = 1500;
ActivityScenario<HomeActivity> scenario = ActivityScenario.launch(intent);
scenario.onActivity(activity -> {
getUIViews(activity);
Intent mockIntent = getMockIntent();
activity.onActivityResult(MockActivity.REQUEST_CODE, Activity.RESULT_OK, mockIntent);
startButton.performClick();
timeElapsed = 79_200_000;
Intent endWalkIntent = new Intent(ApplicationProvider.getApplicationContext(), MockActivity.class)
.putExtra(MockActivity.ADDED_STEPS_KEY, 0)
.putExtra(MockActivity.INPUT_TIME_KEY, "5:20:00")
.putExtra(MockActivity.SETTING_START_TIME_KEY, false);
activity.onActivityResult(MockActivity.REQUEST_CODE, Activity.RESULT_OK, endWalkIntent);
stopButton.performClick();
assertEquals("1320.00 min.", latestTimeTv.getText().toString());
});
}
private void getUIViews(HomeActivity activity) {
stepsTv = activity.findViewById(R.id.tv_daily_steps);
distanceTv = activity.findViewById(R.id.tv_daily_distance);
noWalkTodayTv = activity.findViewById(R.id.tv_no_recent_walk_prompt);
latestDistanceTv = activity.findViewById(R.id.tv_recent_distance);
latestStepsTv = activity.findViewById(R.id.tv_recent_steps);
latestTimeTv = activity.findViewById(R.id.tv_recent_time_elapsed);
startButton = activity.findViewById(R.id.btn_start_walk);
stopButton = activity.findViewById(R.id.btn_stop_walk);
}
private Intent getMockIntent() {
return new Intent(ApplicationProvider.getApplicationContext(), MockActivity.class)
.putExtra(MockActivity.ADDED_STEPS_KEY, 1500)
.putExtra(MockActivity.INPUT_TIME_KEY, "7:20:00")
.putExtra(MockActivity.SETTING_START_TIME_KEY, true);
}
private class TestFitnessService implements FitnessService {
private static final String TAG = "[TestFitnessService]: ";
private HomeActivity activity;
public TestFitnessService(HomeActivity activity) {
this.activity = activity;
}
@Override
public int getRequestCode() {
return 0;
}
@Override
public void setup() {
System.out.println(TAG + "setup");
}
@Override
public void updateDailyStepCount() {
System.out.println(TAG + "updateStepCount");
activity.setDailyStats(nextStepCount);
}
@Override
public void startRecording() {
}
@Override
public void stopRecording() {
activity.setLatestWalkStats(nextStepCount, timeElapsed);
}
@Override
public double getDistanceFromHeight(long steps, int heightFeet, float heightRemainderInches) {
return new GoogleFitAdapter(activity).getDistanceFromHeight(steps, heightFeet, heightRemainderInches);
}
@Override
public void setStartRecordingTime(long startTime) {
}
@Override
public void setEndRecordingTime(long endTime) {
}
@Override
public void setStepsToAdd(long stepsToAdd) {
}
}
}
<file_sep>/app/src/main/java/com/cse110team24/walkwalkrevolution/firebase/messaging/MessagingObserver.java
package com.cse110team24.walkwalkrevolution.firebase.messaging;
import com.cse110team24.walkwalkrevolution.models.invitation.Invitation;
import com.google.android.gms.tasks.Task;
public interface MessagingObserver {
/**
* Called by the MessagingSubject this observer is observing when the subject successfully
* sent the invitation.
* <p>See also: {@link MessagingSubject#notifyObserversInvitationSent(Invitation)}</p>
* @param invitation the invitation that was sent
*/
void onInvitationSent(Invitation invitation);
/**
* Called by the MessagingSubject this observer is observing when the subject failed to send an
* invitation.
* <p>See also: {@link MessagingSubject#notifyObserversFailedInvitationSent(Task)}</p>
* @param task the task containing the result of trying to send the invitation
*/
void onFailedInvitationSent(Task<?> task);
}
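// Minimal illustrative implementation (added, not original source). It assumes the observer is
// registered with a MessagingSubject as referenced in the Javadoc above; the class name is made up.
//
//   public class InviteScreen implements MessagingObserver {
//       @Override
//       public void onInvitationSent(Invitation invitation) {
//           // e.g. show confirmation UI for the sent invitation
//       }
//
//       @Override
//       public void onFailedInvitationSent(Task<?> task) {
//           // e.g. log task.getException() and let the user retry
//       }
//   }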
<file_sep>/app/src/main/java/com/cse110team24/walkwalkrevolution/firebase/auth/AuthObserver.java
package com.cse110team24.walkwalkrevolution.firebase.auth;
import com.cse110team24.walkwalkrevolution.models.user.IUser;
public interface AuthObserver {
/**
* Called when the AuthServiceSubject this observer is observing signs in the user
* <p>See also: {@link AuthSubject#notifyObserversSignedIn(IUser)}</p>
* @param user the user that was signed in
*/
void onUserSignedIn(IUser user);
/**
* Called when the AuthServiceSubject this observer is observing signs up the user
* <p>See also: {@link AuthSubject#notifyObserversSignedUp(IUser)}</p>
* @param user the user that was signed up
*/
void onUserSignedUp(IUser user);
/**
* Called when the AuthServiceSubject this observer is observing fails to sign in the user
* <p>See also: {@link AuthSubject#notifyObserversSignInError(Auth.AuthError)}</p>
* @param error the error that was encountered during sign in
*/
void onAuthSignInError(Auth.AuthError error);
/**
* Called when the AuthServiceSubject this observer is observing fails to sign up the user
* <p>See also: {@link AuthSubject#notifyObserversSignUpError(Auth.AuthError)}</p>
* @param error the error that was encountered during sign up
*/
void onAuthSignUpError(Auth.AuthError error);
}
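// Minimal illustrative implementation (added, not original source), assuming the observer is
// registered with an AuthSubject as the Javadoc above describes. Names are hypothetical.
//
//   public class SignInScreen implements AuthObserver {
//       @Override public void onUserSignedIn(IUser user)  { /* proceed to the home screen */ }
//       @Override public void onUserSignedUp(IUser user)  { /* create the user document, then proceed */ }
//       @Override public void onAuthSignInError(Auth.AuthError error) { /* surface the error to the user */ }
//       @Override public void onAuthSignUpError(Auth.AuthError error) { /* surface the error to the user */ }
//   }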
<file_sep>/app/src/main/java/com/cse110team24/walkwalkrevolution/firebase/messaging/FirebaseMessagingFactory.java
package com.cse110team24.walkwalkrevolution.firebase.messaging;
import android.app.Activity;
import com.cse110team24.walkwalkrevolution.firebase.firestore.services.DatabaseService;
public class FirebaseMessagingFactory implements MessagingFactory {
@Override
public Messaging createMessagingService(Activity activity, DatabaseService databaseService) {
return new FirebaseMessagingAdapter(activity, databaseService);
}
}
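// Hypothetical usage (added, not original source): callers depend only on the MessagingFactory
// and Messaging abstractions, which is what lets tests swap in TestMessage.TestMessagingFactory.
// The databaseService argument is assumed to come from the app's DatabaseService factory.
//
//   MessagingFactory factory = new FirebaseMessagingFactory();
//   Messaging messaging = factory.createMessagingService(activity, databaseService);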
<file_sep>/app/src/main/java/com/cse110team24/walkwalkrevolution/firebase/messaging/MessagingFactory.java
package com.cse110team24.walkwalkrevolution.firebase.messaging;
import android.app.Activity;
import com.cse110team24.walkwalkrevolution.firebase.firestore.services.DatabaseService;
public interface MessagingFactory {
Messaging createMessagingService(Activity activity, DatabaseService databaseService);
}
<file_sep>/app/src/main/java/com/cse110team24/walkwalkrevolution/utils/RoutesManager.java
package com.cse110team24.walkwalkrevolution.utils;
import android.content.Context;
import android.os.AsyncTask;
import android.util.Log;
import com.cse110team24.walkwalkrevolution.activities.userroutes.RoutesActivity;
import com.cse110team24.walkwalkrevolution.models.route.Route;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.ArrayList;
import java.util.List;
/**
* provides various methods to help manage Routes
*/
public class RoutesManager {
private static final String TAG = "WWR_RoutesManager";
/**
* write a list of Route objects to a file
* @param routes list of routes to be written
* @param filename file to be written to in app storage
* @param context application context from which to get file
* @throws IOException if the file stream could not be created
*/
public static void writeList(List<Route> routes, String filename, Context context) throws IOException {
FileOutputStream fos = context.openFileOutput(filename, Context.MODE_PRIVATE);
ObjectOutputStream oos = new ObjectOutputStream(fos);
oos.writeObject(routes);
oos.close();
Log.i(TAG, "writeList: successfully wrote list of routes to " + filename);
}
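// Illustrative round trip (added, not original source); "my_routes" is a made-up file name and
// someRoute stands for any existing Route instance.
//
//   List<Route> routes = new ArrayList<>();
//   routes.add(someRoute);
//   RoutesManager.writeList(routes, "my_routes", context);
//   List<Route> loaded = RoutesManager.readList("my_routes", context);  // see readList below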
/**
* write a Route object to a file
* @param route Route object to be written
* @param filename file to be written to in app storage
* @param context application context from which to get file
* @throws IOException if the file stream could not be created
*/
public static void writeSingle(Route route, String filename, Context context) throws IOException {
FileOutputStream fos = context.openFileOutput(filename, Context.MODE_PRIVATE);
ObjectOutputStream oos = new ObjectOutputStream(fos);
oos.writeObject(route);
oos.close();
Log.i(TAG, "writeSingle: successfully wrote single Route object to " + filename);
}
/**
* read a list of Route objects from a file
* @param filename file to be read from in app storage
* @param context application context from which to get file
* @return a {@code List<Route>} object (empty if the file was not read)
* @throws IOException if the file stream could not be created
*/
public static List<Route> readList(String filename, Context context) throws IOException {
ObjectInputStream ois = getInputStream(filename, context);
if (ois == null) {
return new ArrayList<>();
}
List<Route> routes = null;
try {
routes = (List<Route>) ois.readObject();
} catch (ClassNotFoundException e) {
e.printStackTrace();
}
ois.close();
return (routes == null) ? new ArrayList<>() : routes;
}
/**
* read a route object from a file
* @param filename file to be read from in app storage
* @param context application context from which to get file
* @return a Route object (caution: returns null if the file could not be read;
* any I/O problems are caught internally)
*/
public static Route readSingle(String filename, Context context) {
ObjectInputStream ois = getInputStream(filename, context);
if (ois == null) {
return null;
}
Route route = null;
try {
route = (Route) ois.readObject();
} catch (Exception e) {
e.printStackTrace();
}
try {
ois.close();
} catch (IOException e) {
e.printStackTrace();
}
return route;
}
/**
* @param filename filename to look for latest route
* @return a route object if a latest route exists or null otherwise
*/
public static Route readLatest(String filename, Context context) {
return readSingle(filename, context);
}
public static void replaceInList(Route route, int idx, String listFilename, Context context) throws IOException {
if (idx < 0) {
appendToList(route, listFilename, context);
return;
}
List<Route> routes = readList(listFilename, context);
routes.remove(idx);
routes.add(idx, route);
writeList(routes, listFilename, context);
}
public static void appendToList(Route route, String filename, Context context) throws IOException {
List<Route> storedRoutes = readList(filename, context);
Log.d(TAG, "appendToList: storedRoutes size " + storedRoutes.size());
storedRoutes.add(route);
writeList(storedRoutes, filename, context);
Log.i(TAG, "appendToList: successfully appended single Route object to" + filename +" by calling writeListg");
}
/**
* deletes any existing "latest route" file, then writes the new route; throws IllegalArgumentException if the route's stats are null
* @param route route to be written to file
* @param filename file to be written to
*/
public static void writeLatest(Route route, String filename, Context context) throws IllegalArgumentException,
IOException {
if (route.getStats() == null) {
throw new IllegalArgumentException("Can't write latest route without stats");
}
deleteExistingFile(filename, context);
writeSingle(route, filename, context);
}
private static void deleteExistingFile(String filename, Context context) {
context.deleteFile(filename);
}
// for convenience - gets input stream, handling exceptions
private static ObjectInputStream getInputStream(String filename, Context context) {
FileInputStream fis = null;
try {
fis = context.openFileInput(filename);
} catch (FileNotFoundException e) {
return null;
}
ObjectInputStream ois = null;
try {
ois = new ObjectInputStream(fis);
} catch (IOException e) { /* swallowed: an unreadable file yields a null stream, which callers treat as no data */ }
return ois;
}
public static class AsyncTaskSaveRoutes extends AsyncTask<Object, Object, Object> {
@Override
protected Object doInBackground(Object... params) {
List<Route> routes = (List<Route>) params[0];
Context context = (Context) params[1];
try {
writeList(routes, RoutesActivity.LIST_SAVE_FILE, context);
} catch (IOException e) {
Log.e(TAG, "doInBackground: Couldn't save to file", e);
return null;
}
Log.i(TAG, "doInBackground: saved current instance of routes to file");
return null;
}
}
} | 6f5c694a876c3728c809d7c47baf53cc58c2d75b | [
"Markdown",
"Java",
"JavaScript",
"HTML"
] | 54 | Java | CheeryW/team-project-team24 | eb6ed30c13c145c7c571aacaeea2978aadb19db5 | 05cc2323c61093cb5a551392e0f8252ed470247b | |
refs/heads/master | <repo_name>MiSeWang/taotao-parent<file_sep>/taotao-manager/taotao-manager-web/src/main/java/com/mrlv/taotao/controller/IndexController.java
package com.mrlv.taotao.controller;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
/**
* Home page controller
* @author Administrator
*
*/
@Controller
public class IndexController {
@RequestMapping("/{itemId}")
public String index(@PathVariable String itemId) {
return itemId;
}
@RequestMapping("/")
public String index() {
return "index";
}
}
<file_sep>/taotao-manager/taotao-manager-service/src/main/java/com/mrlv/taotao/service/Impl/ItemCatServiceImpl.java
package com.mrlv.taotao.service.Impl;
import java.util.ArrayList;
import java.util.List;
import javax.annotation.Resource;
import org.springframework.stereotype.Service;
import com.mrlv.taotao.common.pojo.EasyUITreeNode;
import com.mrlv.taotao.mapper.ItemCatMapper;
import com.mrlv.taotao.pojo.ItemCat;
import com.mrlv.taotao.pojo.ItemCatExample;
import com.mrlv.taotao.pojo.ItemCatExample.Criteria;
import com.mrlv.taotao.service.IItemCatService;
/**
* Item category service
* @author Administrator
*
*/
@Service("itemCatService")
public class ItemCatServiceImpl implements IItemCatService {
@Resource
private ItemCatMapper itemCatMapper;
@Override
public List<EasyUITreeNode> getItemCatList(Long parentId) {
ItemCatExample ice = new ItemCatExample();
Criteria iceCriteria = ice.createCriteria();
// set the query condition: find child nodes by parentId
iceCriteria.andParentIdEqualTo(parentId);
// fetch the list of child nodes
List<ItemCat> itemCats = itemCatMapper.selectByExample(ice);
List<EasyUITreeNode> list = new ArrayList<EasyUITreeNode>();
for (ItemCat itemCat : itemCats) {
EasyUITreeNode node = new EasyUITreeNode();
node.setId(itemCat.getId());
node.setText(itemCat.getName());
// if it is a parent node set the state to "closed"; if it is a leaf node set it to "open"
node.setState(itemCat.getIsParent()?"closed":"open");
list.add(node);
}
return list;
}
}
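// For reference (added, not original source): serialized as JSON this produces the structure the
// EasyUI tree widget expects for lazy loading, roughly
//   [{"id":1,"text":"Books","state":"closed"},{"id":2,"text":"Music","state":"open"}]
// where a "closed" node triggers another request using its id as the new parentId.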
<file_sep>/taotao-rest/src/main/java/com/mrlv/taotao/rest/service/impl/ContentServiceImpl.java
package com.mrlv.taotao.rest.service.impl;
import java.util.List;
import javax.annotation.Resource;
import com.mrlv.taotao.common.utils.JsonUtils;
import com.mrlv.taotao.rest.dao.JedisClient;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import com.mrlv.taotao.mapper.ContentMapper;
import com.mrlv.taotao.pojo.Content;
import com.mrlv.taotao.pojo.ContentExample;
import com.mrlv.taotao.pojo.ContentExample.Criteria;
import com.mrlv.taotao.rest.service.IContentService;
@Service("contentService")
public class ContentServiceImpl implements IContentService {
@Resource
private ContentMapper contentMapper;
@Resource
private JedisClient jedisClient;
@Value("${INDEX_CONTENT_REDIS_KEY}")
private String INDEX_CONTENT_REDIS_KEY;
public List<Content> getContentList(long contentCid){
// try to get the content from the cache first
try {
String result = jedisClient.hget(INDEX_CONTENT_REDIS_KEY, contentCid + "");
// check whether the cached value is present
if (!StringUtils.isBlank(result)){
// convert the JSON string back to a list
List<Content> contents = JsonUtils.jsonToList(result, Content.class);
return contents;
}
} catch (Exception e) {
e.printStackTrace();
}
// query the content list from the database by content category id
ContentExample contentExample = new ContentExample();
Criteria criteria = contentExample.createCriteria();
criteria.andCategoryIdEqualTo(contentCid);
// execute the query
List<Content> list = contentMapper.selectByExample(contentExample);
// add the result to the cache
try {
// convert the list to a JSON string
String cacheString = JsonUtils.objectToJson(list);
jedisClient.hset(INDEX_CONTENT_REDIS_KEY, contentCid + "", cacheString);
} catch (Exception e) {
e.printStackTrace();
}
return list;
}
}
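// Cache-aside note (added, not original source): reads key the Redis hash INDEX_CONTENT_REDIS_KEY
// by content category id. To invalidate after an edit, the matching field has to be removed, e.g.
//   jedisClient.hdel(INDEX_CONTENT_REDIS_KEY, categoryId + "");
// which is the hdel that RedisServiceImpl.syncContent(...) performs, so the next read falls
// through to MySQL and repopulates the cache.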
<file_sep>/taotao-manager/taotao-manager-service/src/main/java/com/mrlv/taotao/service/Impl/PictureServiceImpl.java
package com.mrlv.taotao.service.Impl;
import java.io.IOException;
import org.joda.time.DateTime;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;
import com.mrlv.taotao.common.pojo.PictureResult;
import com.mrlv.taotao.common.utils.FtpUtil;
import com.mrlv.taotao.common.utils.IDUtils;
import com.mrlv.taotao.service.IPictureService;
/**
* Picture upload service
* @author Administrator
*
*/
@Service("pictureService")
public class PictureServiceImpl implements IPictureService {
@Value("${FTP_ADDRESS}")
private String FTP_ADDRESS;
@Value("${FTP_PORT}")
private Integer FTP_PORT;
@Value("${FTP_USERNAME}")
private String FTP_USERNAME;
@Value("${FTP_PASSWORD}")
private String FTP_PASSWORD;
@Value("${FTP_BASE_PATH}")
private String FTP_BASE_PATH;
@Value("${IMAGE_BASE_URL}")
private String IMAGE_BASE_URL;
@Override
public PictureResult uploadPicture(MultipartFile uploadFile) {
try {
// generate a new file name (a UUID would also work)
String newName = IDUtils.genImageName();
// get the original file name so its extension can be kept
String oldName = uploadFile.getOriginalFilename();
newName = newName + oldName.substring(oldName.lastIndexOf("."));
// build the date-based directory path
String imagePath = new DateTime().toString("/yyyy/MM/dd");
System.out.println(imagePath + "/" + newName);
// upload the picture via FTP
boolean result = FtpUtil.uploadFile(FTP_ADDRESS, FTP_PORT, FTP_USERNAME, FTP_PASSWORD,
FTP_BASE_PATH, imagePath, newName, uploadFile.getInputStream());
if(!result) {
PictureResult pictureResult = new PictureResult(1, null, "文件上传失败");
return pictureResult;
} else {
PictureResult pictureResult = new PictureResult(0, IMAGE_BASE_URL + imagePath + "/" + newName, null);
return pictureResult;
}
} catch (IOException e) {
PictureResult pictureResult = new PictureResult(1, null, "文件上传发生异常");
return pictureResult;
}
}
}
<file_sep>/taotao-manager/taotao-manager-web/src/main/java/com/mrlv/taotao/controller/ContentCategoryController.java
package com.mrlv.taotao.controller;
import java.util.List;
import javax.annotation.Resource;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import com.mrlv.taotao.common.pojo.EasyUITreeNode;
import com.mrlv.taotao.common.pojo.TaotaoResult;
import com.mrlv.taotao.service.IContentCategroyService;
@Controller
public class ContentCategoryController {
@Resource
private IContentCategroyService contentCategroyService;
@RequestMapping("/content/category/list")
@ResponseBody
public List<EasyUITreeNode> getContentCatList(@RequestParam(value="id", defaultValue="0")Long parentId){
return contentCategroyService.getCategoryList(parentId);
}
@RequestMapping("/content/category/create")
@ResponseBody
public TaotaoResult createContentCategory(Long parentId, String name) {
return contentCategroyService.insertContentCategory(parentId, name);
}
@RequestMapping("/content/category/delete")
@ResponseBody
public TaotaoResult delectContentCategory(Long parentId, Long id) {
return contentCategroyService.delectContentCategory(id);
}
}
<file_sep>/taotao-rest/src/main/java/com/mrlv/taotao/rest/service/impl/RedisServiceImpl.java
package com.mrlv.taotao.rest.service.impl;
import com.mrlv.taotao.common.pojo.TaotaoResult;
import com.mrlv.taotao.common.utils.ExceptionUtil;
import com.mrlv.taotao.rest.dao.JedisClient;
import com.mrlv.taotao.rest.service.IRedisService;
import org.springframework.beans.factory.annotation.Value;
import javax.annotation.Resource;
public class RedisServiceImpl implements IRedisService{
@Resource
private JedisClient jedisClient;
@Value("${INDEX_CONTENT_REDIS_KEY}")
private String INDEX_CONTENT_REDIS_KEY;
@Override
public TaotaoResult syncContent(long contentId) {
try {
jedisClient.hdel(INDEX_CONTENT_REDIS_KEY, contentId+"");
} catch (Exception e) {
e.printStackTrace();
return TaotaoResult.build(500, ExceptionUtil.getStackTrace(e));
}
return TaotaoResult.ok();
}
}
<file_sep>/taotao-manager/taotao-manager-service/src/main/java/com/mrlv/taotao/service/Impl/ItemServiceImpl.java
package com.mrlv.taotao.service.Impl;
import java.util.Date;
import java.util.List;
import javax.annotation.Resource;
import org.springframework.stereotype.Service;
import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import com.mrlv.taotao.common.pojo.EasyUIResult;
import com.mrlv.taotao.common.pojo.TaotaoResult;
import com.mrlv.taotao.common.utils.IDUtils;
import com.mrlv.taotao.mapper.ItemDescMapper;
import com.mrlv.taotao.mapper.ItemMapper;
import com.mrlv.taotao.pojo.Item;
import com.mrlv.taotao.pojo.ItemDesc;
import com.mrlv.taotao.pojo.ItemExample;
import com.mrlv.taotao.service.IItemService;
/**
* Item service
* @author Administrator
*
*/
@Service("itemService")
public class ItemServiceImpl implements IItemService {
@Resource
private ItemMapper itemMapper;
@Resource
private ItemDescMapper itemDescMapper;
@Override
public Item getItemById(Long id) {
return itemMapper.selectByPrimaryKey(id);
}
@Override
public EasyUIResult getItemList(Integer page, Integer rows) {
// pagination
PageHelper.startPage(page, rows);
List<Item> items = itemMapper.selectByExample(new ItemExample());
EasyUIResult eugr = new EasyUIResult();
eugr.setRows(items);
// get the total record count
PageInfo<Item> info = new PageInfo<Item>(items);
eugr.setTotal(info.getTotal());
return eugr;
}
@Override
public TaotaoResult createItem(Item item, String desc) throws Exception {
// generate the item id
Long itemId = IDUtils.genItemId();
item.setId(itemId);
// item status: 1 - normal, 2 - off the shelf, 3 - deleted
item.setStatus((byte)1);
item.setCreated(new Date());
item.setUpdated(new Date());
itemMapper.insert(item);
TaotaoResult descResult = insertItemDesc(itemId, desc);
if(descResult.getStatus() != 200) {
throw new Exception();
}
return TaotaoResult.ok();
}
public ItemDesc getItemDesc(Long itemId){
ItemDesc itemDesc = itemDescMapper.selectByPrimaryKey(itemId);
return itemDesc;
}
/**
* Add the item description
* @param desc
* @return
*/
private TaotaoResult insertItemDesc(Long itemId, String desc) {
ItemDesc itemDesc = new ItemDesc();
itemDesc.setItemId(itemId);
itemDesc.setItemDesc(desc);
itemDesc.setCreated(new Date());
itemDesc.setUpdated(new Date());
itemDescMapper.insert(itemDesc);
return TaotaoResult.ok();
}
}
<file_sep>/taotao-rest/src/test/java/com/mrlv/taotao/jedis/JedisTest.java
package com.mrlv.taotao.jedis;
import org.junit.Test;
import redis.clients.jedis.HostAndPort;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisCluster;
import redis.clients.jedis.JedisPool;
import java.util.HashMap;
import java.util.HashSet;
public class JedisTest {
/**
* standalone Redis test
*/
@Test
public void testJedisSingle(){
// create a Jedis object
Jedis jedis = new Jedis("127.0.0.1", 6379);
// call methods on the Jedis object; the method names match the redis commands
jedis.set("one", "初号机");
String one = jedis.get("one");
System.out.println(one);
// close jedis
jedis.close();
}
/**
* using a connection pool
*/
@Test
public void testJedisPool(){
// create the Jedis connection pool
JedisPool jedisPool = new JedisPool("127.0.0.1", 6379);
// get a Jedis object from the pool
Jedis jedis = jedisPool.getResource();
jedis.set("two", "二号机");
String two = jedis.get("two");
System.out.println(two);
// close the Jedis object (returns it to the pool)
jedis.close();
jedisPool.close();
}
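// Alternative sketch (added, not original source): both Jedis and JedisPool are Closeable, so a
// try-with-resources block releases the connection even if an assertion throws:
//
//   try (JedisPool pool = new JedisPool("127.0.0.1", 6379);
//        Jedis j = pool.getResource()) {
//       j.set("two", "二号机");
//   }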
/**
* cluster version test
*/
// @Test
public void testJedisCluster(){
// create the set of cluster nodes
HashSet<HostAndPort> nodes = new HashSet<>();
// add the nodes (same placeholder host/port repeated here)
nodes.add(new HostAndPort("127.0.0.1", 6379));
nodes.add(new HostAndPort("127.0.0.1", 6379));
nodes.add(new HostAndPort("127.0.0.1", 6379));
nodes.add(new HostAndPort("127.0.0.1", 6379));
JedisCluster cluster = new JedisCluster(nodes);
cluster.set("three", "三号机");
String three = cluster.get("three");
System.out.println(three);
cluster.close();
}
}
<file_sep>/taotao-manager/taotao-manager-service/src/main/java/com/mrlv/taotao/service/IPictureService.java
package com.mrlv.taotao.service;
import org.springframework.web.multipart.MultipartFile;
import com.mrlv.taotao.common.pojo.PictureResult;
public interface IPictureService {
// a Map could also be used as the return type
public PictureResult uploadPicture(MultipartFile uploadFile);
}
<file_sep>/taotao-manager/taotao-manager-web/src/main/java/com/mrlv/taotao/controller/ContentController.java
package com.mrlv.taotao.controller;
import javax.annotation.Resource;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import com.mrlv.taotao.common.pojo.EasyUIResult;
import com.mrlv.taotao.common.pojo.PictureResult;
import com.mrlv.taotao.common.pojo.TaotaoResult;
import com.mrlv.taotao.pojo.Content;
import com.mrlv.taotao.service.IContentService;
@Controller
public class ContentController {
@Resource
private IContentService contentService;
@RequestMapping("/content/query/list")
@ResponseBody
public EasyUIResult list(Integer page, Integer rows, long categoryId) {
EasyUIResult result = contentService.getContent(page, rows, categoryId);
return result;
}
@RequestMapping("/content/save")
@ResponseBody
public TaotaoResult insertContent(Content content) {
TaotaoResult result = contentService.insertContent(content);
return result;
}
}
<file_sep>/taotao-manager/taotao-manager-web/src/main/java/com/mrlv/taotao/controller/ItemCatController.java
package com.mrlv.taotao.controller;
import java.util.List;
import javax.annotation.Resource;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import com.mrlv.taotao.common.pojo.EasyUITreeNode;
import com.mrlv.taotao.service.IItemCatService;
/**
* Item category
* @author Administrator
*
*/
@Controller
public class ItemCatController {
@Resource
private IItemCatService itemCatService;
@RequestMapping("/item/cat/list")
@ResponseBody
// defaultValue is the default value; value maps to the request parameter name
public List<EasyUITreeNode> categoryList(@RequestParam(value="id",defaultValue="0") Long parentId) {
List<EasyUITreeNode> itemCatList = itemCatService.getItemCatList(parentId);
return itemCatList;
}
}
<file_sep>/taotao-manager/taotao-manager-service/src/main/java/com/mrlv/taotao/service/IItemParamItemService.java
package com.mrlv.taotao.service;
public interface IItemParamItemService {
public String getItemParamByItemId(Long itemId);
}
<file_sep>/taotao-manager/taotao-manager-web/src/main/java/com/mrlv/taotao/controller/ItemParamItemController.java
package com.mrlv.taotao.controller;
import javax.annotation.Resource;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import com.mrlv.taotao.service.IItemParamItemService;
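/**
* Renders the item detail page together with its specification parameters.
*/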
@Controller
public class ItemParamItemController {
@Resource
private IItemParamItemService itemParamItemService;
@RequestMapping("/item/1111/{itemId}")
public String showItemParam(@PathVariable Long itemId, Model model) {
String string = itemParamItemService.getItemParamByItemId(itemId);
model.addAttribute("itemParam", string);
return "item";
}
}
<file_sep>/taotao-rest/src/main/java/com/mrlv/taotao/rest/controller/ContentController.java
package com.mrlv.taotao.rest.controller;
import java.util.List;
import javax.annotation.Resource;
import org.springframework.http.MediaType;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import com.mrlv.taotao.common.pojo.TaotaoResult;
import com.mrlv.taotao.common.utils.ExceptionUtil;
import com.mrlv.taotao.pojo.Content;
import com.mrlv.taotao.rest.service.IContentService;
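/**
* REST endpoint that returns the content list of a content category as UTF-8 JSON.
*/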
@Controller
public class ContentController {
@Resource
private IContentService contentService;
@RequestMapping(value="/content/list/{contentCategoryId}"
,produces=MediaType.APPLICATION_JSON_VALUE + ";charset=utf-8")
@ResponseBody
public TaotaoResult getContentList(@PathVariable Long contentCategoryId) {
try {
List<Content> contentList = contentService.getContentList(contentCategoryId);
return TaotaoResult.ok(contentList);
} catch (Exception e) {
e.printStackTrace();
return TaotaoResult.build(500, ExceptionUtil.getStackTrace(e));
}
}
}
<file_sep>/taotao-manager/taotao-manager-web/src/main/java/com/mrlv/taotao/controller/ItemParamController.java
package com.mrlv.taotao.controller;
import javax.annotation.Resource;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import com.mrlv.taotao.common.pojo.EasyUIResult;
import com.mrlv.taotao.common.pojo.TaotaoResult;
import com.mrlv.taotao.pojo.ItemParam;
import com.mrlv.taotao.service.IItemParamService;
/**
* Item specification parameters
* @author Administrator
*
*/
@Controller
public class ItemParamController {
@Resource
private IItemParamService itemParamService;
@RequestMapping("/item/param/list")
@ResponseBody
public EasyUIResult getItemParamList(Integer page, Integer rows) {
EasyUIResult result = itemParamService.getItemParamList(page, rows);
return result;
}
/**
* Query the specification parameter template by item category id
* @param itemCatId
* @return
*/
@RequestMapping("/item/param/query/itemcatid/{itemCatId}")
@ResponseBody
public TaotaoResult getItemParamByCid(@PathVariable Long itemCatId) {
TaotaoResult result = itemParamService.getItemParamByCid(itemCatId);
return result;
}
/**
* Add a specification parameter template for an item category
* <p>Title: insertItemParam</p>
* <p>Description: </p>
* @return
*/
@RequestMapping("/item/param/save/{itemCatId}")
@ResponseBody
public TaotaoResult insertItemParam(@PathVariable Long itemCatId, String paramData) {
ItemParam itemParam = new ItemParam();
itemParam.setItemCatId(itemCatId);
itemParam.setParamData(paramData);
TaotaoResult result = itemParamService.insertItemParam(itemParam);
return result;
}
}
<file_sep>/taotao-search/src/main/java/com/mrlv/taotao/search/controller/ItemController.java
package com.mrlv.taotao.search.controller;
import com.mrlv.taotao.common.pojo.TaotaoResult;
import com.mrlv.taotao.search.service.IItemService;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import javax.annotation.Resource;
/**
* Search index maintenance
*/
@Controller
@RequestMapping("/manager")
public class ItemController {
@Resource
private IItemService itemService;
/**
* Import all items into the search index
*/
@RequestMapping("/importall")
@ResponseBody
public TaotaoResult importAllItem(){
TaotaoResult result = itemService.importAllItem();
return result;
}
}
<file_sep>/taotao-portal/src/main/java/com/mrlv/taotao/portal/controller/IndexController.java
package com.mrlv.taotao.portal.controller;
import javax.annotation.Resource;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import com.mrlv.taotao.portal.service.IContentService;
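/**
* Portal home page controller: loads the advertisement content and renders the index view.
*/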
@Controller
public class IndexController {
@Resource
private IContentService contentService;
@RequestMapping("/index")
public String showIndex(Model model) {
String adJson = contentService.getContentList();
model.addAttribute("ad1", adJson);
return "index";
}
}
<file_sep>/taotao-search/src/main/java/com/mrlv/taotao/search/service/impl/ItemServiceImpl.java
package com.mrlv.taotao.search.service.impl;
import com.mrlv.taotao.common.pojo.TaotaoResult;
import com.mrlv.taotao.search.mapper.ItemSearchMapper;
import com.mrlv.taotao.search.pojo.Item;
import com.mrlv.taotao.search.service.IItemService;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.common.SolrInputDocument;
import org.springframework.stereotype.Service;
import javax.annotation.Resource;
import java.util.List;
@Service("itemService")
public class ItemServiceImpl implements IItemService{
@Resource
private ItemSearchMapper itemSearchMapper;
@Resource
private HttpSolrClient httpSolrClient;
@Override
public TaotaoResult importAllItem() {
//note: this replaces the injected client with one pointed at the local Solr core
httpSolrClient = new HttpSolrClient.Builder("http://localhost:8983/solr/taotao_core").build();
//query the item list from the database
List<Item> itemList = itemSearchMapper.getItemList();
//write the item information into the search index
try {
for (Item item : itemList){
//create a SolrInputDocument for each item
SolrInputDocument document = new SolrInputDocument();
document.setField("id", item.getId());
document.setField("item_title", item.getTitle());
document.setField("item_sell_point", item.getSell_point());
document.setField("item_price", item.getPrice());
document.setField("item_image", item.getImage());
document.setField("item_category_name", item.getCategory_name());
//a field missing from the Solr schema would make indexing fail, so this one stays disabled
// document.setField("item_desc", item.getItem_des());
httpSolrClient.add(document);
}
httpSolrClient.commit();
} catch (Exception e) {
e.printStackTrace();
}
return TaotaoResult.ok();
}
}
refs/heads/main | <repo_name>sophiemichaud/Array-Manipulation<file_sep>/README.md
# Array-Manipulation
Program that manipulates the rows and columns of an array
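Build it with the included makefile (`make`) and run the resulting `array_manipulation` executable; `make clean` removes the build artifacts.

A minimal sketch of how the API is used (it mirrors what `main` in `array_manipulation.c` already does; the `Double_Array` struct and the function prototypes are assumed to be declared in `array_manipulation.h`, which is not part of this listing):

```c
#include "array_manipulation.h"

int main(void) {
    struct Double_Array *a = doubleArray(3, 4);   /* 3 rows x 4 columns */
    randomizeArray(a, 0.0, 1.0);                  /* fill with values between 0 and 1 */
    printArray(a);
    swapRows(a, 0, 2);                            /* row indices must be 0-2 here */
    swapColumns(a, 1, 3);                         /* column indices must be 0-3 here */
    printArray(a);
    freeArray(a);                                 /* release all allocated memory */
    return 0;
}
```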
<file_sep>/array_manipulation.c
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <time.h>
#include "array_manipulation.h"
int main(int argc, char *argv[]){
//seed the random number generator so each run produces different values and swaps
srand(time(NULL));
//creates a double_array struct that holds a 6 row by 9 column array
printf("This is the original 6 row by 9 column array:\n\n");
struct Double_Array *myArray = doubleArray(6,9);
randomizeArray(myArray, 23, 72);
printArray(myArray);
printf("\n");
//randomly picks two rows (valid indices 0-5) and swaps them in the original array
int a = rand() % 6;
int b = rand() % 6;
printf("Rows %d and %d of the original array have been swapped:\n\n", a, b);
swapRows(myArray,a,b);
printRowArray(myArray);
printf("\n");
//randomly picks two columns (valid indices 0-8) and swaps them
int c = rand() % 9;
int d = rand() % 9;
printf("Columns %d and %d of the array have been swapped:\n\n",c,d);
swapColumns(myArray,c,d);
printArray(myArray);
printf("\n\n\n\n");
//frees all malloced memory before exiting the program
freeArray(myArray);
return 0;
}
//returns a pointer to a struct given the input for num rows and num columns
struct Double_Array *doubleArray(int row, int col){
//allocates memory for the struct pointer
struct Double_Array *myArray = malloc(sizeof(struct Double_Array));
//sets row and col size of the struct
myArray -> rowsize = row;
myArray -> colsize = col;
//allocates memory for the 2D array row elements
myArray -> array = malloc (row * sizeof(double*));
//allocates memory for the 2D array column elements
for (int i = 0; i < row; i++){
myArray -> array[i] = malloc (col* sizeof(double));
}
return myArray;
}
//takes in Double_Array struct pointer and frees struct as well as the array in it
void freeArray(struct Double_Array *myArray){
//frees each element in the struct array
for (int i = 0; i < myArray -> rowsize; i++){
free(myArray -> array[i]);
}
//frees the array then the struct pointer
free(myArray -> array);
free(myArray);
}
//takes Double_Array struct pointer and prints the array with each element to 1 decimal place
void printArray(struct Double_Array *myArray){
for (int i = 0; i < myArray -> rowsize; i++){
for (int j = 0; j < myArray -> colsize; j++){
printf("%0.1f ", myArray -> array[i][j]);
}
printf("\n");
}
return;
}
struct Double_Array *printRowArray(struct Double_Array *swapRows){
for (int i = 0; i < swapRows -> rowsize; i++){
for (int j = 0; j < swapRows -> colsize; j++){
printf("%0.1f ", swapRows -> array[i][j]);
}
printf("\n");
}
//return the array that was printed (the function is declared to return a pointer)
return swapRows;
}
//takes in the Double_Array struct and an upper and lower bound numbers to initialize the elements inside the struct array
struct Double_Array *randomizeArray(struct Double_Array *myArray, double a, double b){
//initializes elements in the array to random values between the lower and upper bounds
for (int i = 0; i < myArray -> rowsize; i++){
for (int j = 0; j < myArray -> colsize; j++){
myArray -> array[i][j] = ((double) rand() / (double) RAND_MAX) * (b - a) +a;
}
}
return myArray;
}
//two columns in the struct array are swapped given that the function integer inputs are valid
int swapColumns(struct Double_Array *myArray, int x, int y){
//if the column indices are within the array bounds, the columns are swapped
if(x >= 0 && x < myArray -> colsize && y >= 0 && y < myArray -> colsize){
double tempValue;
//loops through each row and stores one of the chosen column elements in a temporary variable
for(int i = 0; i < myArray -> rowsize; i++){
tempValue = myArray -> array[i][x];
myArray -> array[i][x] = myArray -> array[i][y];
myArray -> array[i][y] = tempValue;
}
return 1;
}
else{
return 0;
}
}
//rows of the struct array are swapped given valid integer inputs
int swapRows(struct Double_Array *myArray, int x, int y){
//swaps rows of the array given valid (in-bounds) row indices
if(x >= 0 && x < myArray -> rowsize && y >= 0 && y < myArray -> rowsize){
//swap the row pointers directly; no temporary buffer has to be allocated or freed,
//and freeing here would release a row that the array still owns
double *tempValue = myArray -> array[x];
myArray -> array[x] = myArray -> array[y];
myArray -> array[y] = tempValue;
return 1;
}
else{
return 0;
}
}
<file_sep>/makefile
CFLAGS = -std=c99 -pedantic -Wall
CC = gcc -g
all: array_manipulation
array_manipulation:array_manipulation.o
$(CC) $(CFLAGS) array_manipulation.o -o array_manipulation
array_manipulation.o:array_manipulation.c array_manipulation.h
$(CC) $(CFLAGS) -c array_manipulation.c -o array_manipulation.o
clean:
rm -i *.o array_manipulation
refs/heads/master | <file_sep>#! -*- coding: utf8 -*-
#This file is part of Tryton. The COPYRIGHT file at the top level of
#this repository contains the full copyright notices and license terms.
from trytond.pool import *
from trytond.model import ModelView, ModelSQL, fields
from trytond.pyson import Eval
from trytond.pyson import Id
from trytond.transaction import Transaction
from trytond.pool import Pool, PoolMeta
from decimal import Decimal
from trytond.wizard import (Wizard, StateView, StateAction, StateTransition,
Button)
try:
import bcrypt
except ImportError:
bcrypt = None
import random
import hashlib
import string
from trytond.config import config
__all__ = ['Template', 'ListByProduct', 'UpdatePriceListByProduct',
'WizardPriceListByProduct']
__metaclass__ = PoolMeta
STATES = {
'readonly': ~Eval('active', True),
}
DEPENDS = ['active']
DIGITS = int(config.get('digits', 'unit_price_digits', 4))
class Template:
__name__ = 'product.template'
listas_precios = fields.One2Many('product.list_by_product', 'template', 'Listas de precio',
states=STATES,depends=DEPENDS)
@classmethod
def __setup__(cls):
super(Template, cls).__setup__()
cls.list_price_with_tax.states['readonly'] = Eval('active', True)
@fields.depends('name')
def on_change_name(self):
if self.name:
self.raise_user_error(u'No olvide configurar: \n-Categoria\n-Impuestos(Pestaña Contabilidad)')
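# Recalculates every included price list line (and the sale price fields) from the new
# cost price, applying each list's margin and the product's customer taxes.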
@fields.depends('cost_price', 'listas_precios', 'id', 'taxes_category',
'category', 'list_price_with_tax', 'list_price')
def on_change_cost_price(self):
pool = Pool()
Taxes1 = pool.get('product.category-customer-account.tax')
Taxes2 = pool.get('product.template-customer-account.tax')
Product = pool.get('product.product')
products = Product.search([('template', '=', self.id)])
for p in products:
product = p
PriceList = pool.get('product.price_list')
User = pool.get('res.user')
priceslist = PriceList.search([('incluir_lista', '=', True)])
res= {}
percentage = 0
precio_final = Decimal(0.0)
user = User(Transaction().user)
precio_total = Decimal(0.0)
precio_total_iva = Decimal(0.0)
iva = Decimal(0.0)
precio_para_venta = Decimal(0.0)
# default to empty so the tax lookups below are always bound
taxes1 = []
taxes2 = []
if self.taxes_category == True:
if self.category.taxes_parent == True:
taxes1= Taxes1.search([('category','=', self.category.parent)])
taxes2 = Taxes2.search([('product','=', self.id)])
else:
taxes1= Taxes1.search([('category','=', self.category)])
else:
taxes1= Taxes1.search([('category','=', self.category)])
taxes2 = Taxes2.search([('product','=', self.id)])
if self.listas_precios:
pass
else:
if self.cost_price:
lineas = []
for pricelist in priceslist:
for line in pricelist.lines:
if line.percentage > 0:
percentage = line.percentage/100
if line.use_new_formula == True:
if pricelist.definir_precio_tarjeta == True:
precio_final = precio_para_venta / (1 - percentage)
else:
precio_final = self.cost_price / (1 - percentage)
else:
if pricelist.definir_precio_tarjeta == True:
precio_final = precio_para_venta * (1 + percentage)
else:
precio_final = self.cost_price * (1 + percentage)
if user.company.currency:
precio_final = user.company.currency.round(precio_final)
if taxes1:
for t in taxes1:
iva = precio_final * t.tax.rate
elif taxes2:
for t in taxes2:
iva = precio_final * t.tax.rate
precio_total = precio_final + iva
lineas.append({
'lista_precio': pricelist.id,
'fijo' : precio_final,
'fijo_con_iva':precio_total,
'precio_venta' : pricelist.definir_precio_venta,
})
if pricelist.definir_precio_venta == True:
precio_para_venta = precio_final
precio_total_iva = precio_total
res['listas_precios'] = lineas
res['list_price'] = precio_para_venta
res['list_price_with_tax'] = precio_total_iva
return res
@fields.depends('listas_precios', 'list_price', 'taxes_category', 'category',
'list_price_with_tax', 'customer_taxes', 'cost_price')
def on_change_listas_precios(self):
if self.list_price_with_tax:
price_with_tax = self.list_price_with_tax
else:
price_with_tax = Decimal(0.0)
changes = {
'list_price_with_tax': price_with_tax,
'list_price': self.list_price,
}
if self.listas_precios:
for lista in self.listas_precios:
if (lista.fijo_con_iva > Decimal(0.0)) and (lista.precio_venta == True):
changes['list_price_with_tax'] = lista.fijo_con_iva
changes['list_price'] = self.get_list_price_new(lista.fijo_con_iva)
return changes
return changes
def get_list_price_new(self, list_price_with_tax):
Tax = Pool().get('account.tax')
taxes = [Tax(t) for t in self.get_taxes('customer_taxes_used')]
tax_amount = Tax.reverse_compute(list_price_with_tax, taxes)
return tax_amount.quantize(Decimal(str(10.0 ** -DIGITS)))
@classmethod
def validate(cls, products):
super(Template, cls).validate(products)
def pre_validate(self):
pool = Pool()
User = pool.get('res.user')
Product = pool.get('product.template')
Variante = pool.get('product.product')
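# in_group() returns True when the current user belongs to the "force price update"
# access group (or when running as the trusted server user, id 0).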
def in_group():
pool = Pool()
ModelData = pool.get('ir.model.data')
User = pool.get('res.user')
Group = pool.get('res.group')
origin = str(self)
user = User(Transaction().user)
group = Group(ModelData.get_id('nodux_product_price_list_by_product',
'group_update_price_force'))
transaction = Transaction()
user_id = transaction.user
if user_id == 0:
user_id = transaction.context.get('user', user_id)
if user_id == 0:
return True
user = User(user_id)
return origin and group in user.groups
for lists in self.listas_precios:
if lists.fijo < self.cost_price:
if not in_group():
self.raise_user_error("No esta autorizado a actualizar el precio de la lista de precio")
self.raise_user_warning('precio_costo_menor',
'Precio de venta: "%s"'
'es menor al precio de costo "%s".', (str(lists.fijo), str(self.cost_price)))
class ListByProduct(ModelSQL, ModelView):
"List By Product"
__name__ = "product.list_by_product"
template = fields.Many2One('product.template', 'Product Template',
required=True, ondelete='CASCADE', select=True, states=STATES,
depends=DEPENDS)
lista_precio = fields.Many2One('product.price_list', 'Lista de Precio',
required=True, ondelete='CASCADE', select=True, states=STATES,
depends=DEPENDS)
fijo = fields.Numeric('Precio sin IVA', digits=(16, 6))
precio_venta = fields.Boolean('Definir como precio de VENTA')
product = fields.Many2One('product.product', 'Product Template')
fijo_con_iva = fields.Numeric('Precio con IVA', digits=(16, 6))
@classmethod
def __setup__(cls):
super(ListByProduct, cls).__setup__()
def get_rec_name(self, lista_precio):
return self.lista_precio.name
@classmethod
def search_rec_name(cls, lista_precio, clause):
return [('lista_precio',) + tuple(clause[1:])]
@fields.depends('_parent_template.cost_price', 'lista_precio', 'fijo',
'_parent_template.taxes_category', '_parent_template.category',
'_parent_template.id', '_parent_template.list_price')
def on_change_lista_precio(self):
pool = Pool()
res= {}
percentage = 0
precio_final = Decimal(0.0)
iva = Decimal(0.0)
Taxes1 = pool.get('product.category-customer-account.tax')
Taxes2 = pool.get('product.template-customer-account.tax')
# default to empty so the tax lookups below are always bound
taxes1 = []
taxes2 = []
use_new_formula = False
if self.lista_precio:
if self.lista_precio.lines:
for line in self.lista_precio.lines:
if line.percentage > 0:
percentage = line.percentage/100
if line.use_new_formula == True:
use_new_formula = True
else:
use_new_formula = False
if self.template.cost_price:
if use_new_formula == True:
if self.lista_precio.definir_precio_tarjeta == True:
precio_final = self.template.list_price / (1 - percentage)
else:
precio_final = self.template.cost_price / (1 - percentage)
else:
if self.lista_precio.definir_precio_tarjeta == True:
precio_final = self.template.list_price * (1 + percentage)
else:
precio_final = self.template.cost_price * (1 + percentage)
if self.template.taxes_category == True:
if self.template.category.taxes_parent == True:
taxes1= Taxes1.search([('category','=', self.template.category.parent)])
taxes2 = Taxes2.search([('product','=', self.template)])
else:
taxes1= Taxes1.search([('category','=', self.template.category)])
else:
taxes1= Taxes1.search([('category','=', self.template.category)])
taxes2 = Taxes2.search([('product','=', self.template)])
if taxes1:
for t in taxes1:
iva = precio_final * t.tax.rate
elif taxes2:
for t in taxes2:
iva = precio_final * t.tax.rate
precio_total = precio_final + iva
res['fijo'] = Decimal(str(round(precio_final, 6)))
res['fijo_con_iva'] = Decimal(str(round(precio_total, 6)))
return res
@fields.depends('_parent_template.cost_price', 'lista_precio', 'fijo',
'_parent_template.taxes_category', '_parent_template.category',
'_parent_template.id', 'fijo_con_iva', '_parent_template.list_price')
def on_change_fijo_con_iva(self):
pool = Pool()
res= {}
precio_total = self.fijo
Taxes1 = pool.get('product.category-customer-account.tax')
Taxes2 = pool.get('product.template-customer-account.tax')
iva = Decimal(0.0)
taxes1 = []
taxes2 = []
if self.fijo_con_iva:
if self.template.taxes_category == True:
if self.template.category.taxes_parent == True:
taxes1= Taxes1.search([('category','=', self.template.category.parent)])
taxes2 = Taxes2.search([('product','=', self.template)])
else:
taxes1= Taxes1.search([('category','=', self.template.category)])
else:
taxes1= Taxes1.search([('category','=', self.template.category)])
taxes2 = Taxes2.search([('product','=', self.template)])
if taxes1:
for t in taxes1:
iva = t.tax.rate
elif taxes2:
for t in taxes2:
iva = t.tax.rate
precio_total = self.fijo_con_iva /(1+iva)
res['fijo'] = Decimal(str(round(precio_total, 6)))
return res
@fields.depends('_parent_template.cost_price', 'lista_precio', 'fijo',
'_parent_template.taxes_category', '_parent_template.category',
'_parent_template.id', 'fijo_con_iva', '_parent_template.list_price')
def on_change_fijo(self):
pool = Pool()
res={}
precio_total_iva = self.fijo_con_iva
Taxes1 = pool.get('product.category-customer-account.tax')
Taxes2 = pool.get('product.template-customer-account.tax')
iva = Decimal(0.0)
taxes1 = []
taxes2 = []
if self.fijo:
if self.template.taxes_category == True:
if self.template.category.taxes_parent == True:
taxes1 = Taxes1.search([('category','=', self.template.category.parent)])
taxes2 = Taxes2.search([('product', '=', self.template)])
else:
taxes1 = Taxes1.search([('category', '=', self.template.category)])
else:
taxes1 = Taxes1.search([('category', '=', self.template.category)])
taxes2 = Taxes2.search([('product', '=', self.template)])
if taxes1:
for t in taxes1:
iva = t.tax.rate
elif taxes2:
for t in taxes2:
iva = t.tax.rate
precio_total_con_iva = self.fijo*(1+iva)
res['fijo_con_iva'] = Decimal(str(round(precio_total_con_iva, 6)))
return res
@fields.depends('_parent_template.list_price', '_parent_template.id', 'fijo', 'precio_venta')
def on_change_precio_venta(self):
res= {}
res['list_price'] = self.fijo
self.template.list_price = res['list_price']
return res
class UpdatePriceListByProduct(ModelView):
'Update Price List By Product'
__name__ = 'nodux_product_price_list_by_product.update_price.start'
password = fields.Char('Password', required=True, size=20)
user = fields.Char('Usuario', required=True, readonly=True)
def hash_password(self, password):
if not password:
return ''
return getattr(self, 'hash_' + self.hash_method())(password)
@staticmethod
def hash_method():
return 'bcrypt' if bcrypt else 'sha1'
@classmethod
def hash_sha1(cls, password):
if isinstance(password, unicode):
password = password.encode('utf-8')
salt = ''.join(random.sample(string.ascii_letters + string.digits, 8))
hash_ = hashlib.sha1(password + salt).hexdigest()
return '$'.join(['sha1', hash_, salt])
def check_password(self, password, hash_):
if not hash_:
return False
hash_method = hash_.split('$', 1)[0]
return getattr(self, 'check_' + hash_method)(password, hash_)
@classmethod
def check_sha1(cls, password, hash_):
if isinstance(password, unicode):
password = password.encode('utf-8')
if isinstance(hash_, unicode):
hash_ = hash_.encode('utf-8')
hash_method, hash_, salt = hash_.split('$', 2)
salt = salt or ''
assert hash_method == 'sha1'
return hash_ == hashlib.sha1(password + salt).hexdigest()
@classmethod
def hash_bcrypt(cls, password):
if isinstance(password, unicode):
password = password.encode('utf-8')
hash_ = bcrypt.hashpw(password, bcrypt.gensalt())
return '$'.join(['bcrypt', hash_])
@classmethod
def check_bcrypt(cls, password, hash_):
if isinstance(password, unicode):
password = password.encode('utf-8')
if isinstance(hash_, unicode):
hash_ = hash_.encode('utf-8')
hash_method, hash_ = hash_.split('$', 1)
assert hash_method == 'bcrypt'
return hash_ == bcrypt.hashpw(password, hash_)
@fields.depends('password')
def on_change_password(self):
res = {}
User = Pool().get('res.user')
user = None
value = False
if self.password:
users = User.search([('password_hash', '!=', None)])
if users:
for u in users:
value = self.check_password(self.password, u.password_hash)
if value == True:
res['user'] = u.name
break
if value == False:
self.raise_user_error(u'Invalid password')
return res
class WizardPriceListByProduct(Wizard):
'Wizard Price List By Product'
__name__ = 'nodux_product_price_list_by_product.update_price'
start = StateView('nodux_product_price_list_by_product.update_price.start',
'nodux_product_price_list_by_product.update_price_list_start_view_form', [
Button('Cancel', 'end', 'tryton-cancel'),
Button('Ok', 'accept', 'tryton-ok', default=True),
])
accept = StateTransition()
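# Recalculates the existing price list lines of the selected products from their
# current cost price (restricted to users in the "force price update" group).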
def transition_accept(self):
pool = Pool()
User = pool.get('res.user')
Product = pool.get('product.template')
Variante = pool.get('product.product')
products = Product.browse(Transaction().context['active_ids'])
p = Product(Transaction().context['active_id'])
percentage = 0
precio_final = Decimal(0.0)
new_list_price = Decimal(0.0)
use_new_formula = False
def in_group():
pool = Pool()
ModelData = pool.get('ir.model.data')
User = pool.get('res.user')
Group = pool.get('res.group')
origin = str(p)
user = User(Transaction().user)
group = Group(ModelData.get_id('nodux_product_price_list_by_product',
'group_update_price_force'))
transaction = Transaction()
user_id = transaction.user
if user_id == 0:
user_id = transaction.context.get('user', user_id)
if user_id == 0:
return True
user = User(user_id)
return origin and group in user.groups
if not in_group():
self.raise_user_error("No esta autorizado a actualizar el precio de la lista de precio")
for product in products:
if product.listas_precios:
for listas in product.listas_precios:
if listas.lista_precio.lines:
for line in listas.lista_precio.lines:
if line.use_new_formula == True:
use_new_formula = True
else:
use_new_formula = False
if line.percentage:
if line.percentage > 0:
percentage = line.percentage/100
else:
self.raise_user_error('No ha definido el porcentaje de ganancia en las listas de precio')
if product.cost_price:
if use_new_formula == True:
if listas.definir_precio_tarjeta == True:
precio_final = product.list_price / (1 - percentage)
else:
precio_final = product.cost_price / (1 - percentage)
else:
if listas.definir_precio_tarjeta == True:
precio_final = product.list_price * (1 + percentage)
else:
precio_final = product.cost_price * (1 + percentage)
if listas.precio_venta == True:
new_list_price = precio_final
listas.fijo = precio_final
listas.save()
product.list_price = new_list_price
product.save()
return 'end'
<file_sep>#! -*- coding: utf8 -*-
#This file is part of Tryton. The COPYRIGHT file at the top level of
#this repository contains the full copyright notices and license terms.
from trytond.pool import *
from trytond.report import Report
from trytond.transaction import Transaction
from trytond.modules.company import CompanyReport
from trytond.pool import Pool
from decimal import Decimal
from trytond.model import ModelSQL, Workflow, fields, ModelView
from trytond.pyson import Bool, Eval, Or, If, Id
from trytond.wizard import (Wizard, StateView, StateAction, StateTransition,
Button)
try:
import bcrypt
except ImportError:
bcrypt = None
import random
import hashlib
import string
__all__ = ['PriceList', 'UpdateListByProduct', 'WizardListByProduct', 'PriceListLine']
__metaclass__ = PoolMeta
class PriceList():
'Price List'
__name__ = 'product.price_list'
incluir_lista = fields.Boolean('Incluir lista de precio en producto', states={
'readonly': (Eval('definir_precio_venta', True)) | (Eval('definir_precio_tarjeta', True))
})
definir_precio_venta = fields.Boolean('Definir como precio de venta', help="Definir como precio de venta principal")
definir_precio_tarjeta = fields.Boolean('Precio tarjeta de credito', help="Definir como precio de tarjeta de credito")
@classmethod
def __setup__(cls):
super(PriceList, cls).__setup__()
@fields.depends('incluir_lista', 'definir_precio_venta')
def on_change_definir_precio_venta(self):
res = {}
if self.definir_precio_venta == True:
res['incluir_lista'] = True
else:
if self.incluir_lista:
res['incluir_lista'] = self.incluir_lista
else:
res['incluir_lista'] = False
return res
@fields.depends('incluir_lista', 'definir_precio_tarjeta')
def on_change_definir_precio_tarjeta(self):
res = {}
if self.definir_precio_tarjeta == True:
res['incluir_lista'] = True
else:
if self.incluir_lista:
res['incluir_lista'] = self.incluir_lista
else:
res['incluir_lista'] = False
return res
@classmethod
def validate(cls, price_lists):
super(PriceList, cls).validate(price_lists)
def pre_validate(self):
pool = Pool()
User = pool.get('res.user')
Product = pool.get('product.template')
Variante = pool.get('product.product')
def in_group():
pool = Pool()
ModelData = pool.get('ir.model.data')
User = pool.get('res.user')
Group = pool.get('res.group')
origin = str(self)
user = User(Transaction().user)
group = Group(ModelData.get_id('nodux_product_price_list_by_product',
'group_update_price_force'))
transaction = Transaction()
user_id = transaction.user
if user_id == 0:
user_id = transaction.context.get('user', user_id)
if user_id == 0:
return True
user = User(user_id)
return origin and group in user.groups
if not in_group():
self.raise_user_error("No esta autorizado a cambiar/crear una lista de precio")
class PriceListLine():
'Price List Line'
__name__ = 'product.price_list.line'
new_formula = fields.Char('Formula', readonly = True)
use_new_formula = fields.Boolean('Utilizar formula')
@classmethod
def __setup__(cls):
super(PriceListLine, cls).__setup__()
@staticmethod
def default_new_formula():
return 'costo/(1-%)'
@fields.depends('percentage', 'formula', '_parent_price_list.definir_precio_tarjeta')
def on_change_percentage(self):
pool = Pool()
res= {}
if self.percentage:
if self.percentage > 0:
percentage = self.percentage/100
p = str(percentage)
if self.price_list.definir_precio_tarjeta == True:
formula = 'product.list_price * (1 + ' +p+')'
else:
formula = 'product.cost_price * (1 + ' +p+')'
res['formula'] = formula
else:
res['formula'] = ""
else:
res['formula'] = ""
return res
@fields.depends('percentage', 'formula', 'new_formula', 'use_new_formula',
'_parent_price_list.definir_precio_tarjeta')
def on_change_use_new_formula(self):
pool = Pool()
res= {}
p = '0'
if self.percentage > 0:
percentage = self.percentage/100
p = str(percentage)
if self.use_new_formula:
if self.use_new_formula == True:
if self.price_list.definir_precio_tarjeta == True:
formula = 'product.list_price / (1 - ' +p+')'
else:
formula = 'product.cost_price / (1 - ' +p+')'
res['formula'] = formula
else:
if self.price_list.definir_precio_tarjeta == True:
formula = 'product.list_price * (1 + ' +p+')'
else:
formula = 'product.cost_price * (1 +' +p+')'
res['formula'] = formula
else:
if self.price_list.definir_precio_tarjeta == True:
formula = 'product.list_price * (1 + '+p+')'
else:
formula = 'product.cost_price * (1 +' +p+')'
res['formula'] = formula
return res
class UpdateListByProduct(ModelView):
'Update List By Product'
__name__ = 'nodux_product_price_list_by_product.update.start'
password = fields.Char('Password', required=True, size=20)
user = fields.Char('Usuario', required=True, readonly=True)
def hash_password(self, password):
if not password:
return ''
return getattr(self, 'hash_' + self.hash_method())(password)
@staticmethod
def hash_method():
return 'bcrypt' if bcrypt else 'sha1'
@classmethod
def hash_sha1(cls, password):
if isinstance(password, unicode):
password = password.encode('utf-8')
salt = ''.join(random.sample(string.ascii_letters + string.digits, 8))
hash_ = hashlib.sha1(password + salt).hexdigest()
return '$'.join(['sha1', hash_, salt])
def check_password(self, password, hash_):
if not hash_:
return False
hash_method = hash_.split('$', 1)[0]
return getattr(self, 'check_' + hash_method)(password, hash_)
@classmethod
def check_sha1(cls, password, hash_):
if isinstance(password, unicode):
password = password.encode('utf-8')
if isinstance(hash_, unicode):
hash_ = hash_.encode('utf-8')
hash_method, hash_, salt = hash_.split('$', 2)
salt = salt or ''
assert hash_method == 'sha1'
return hash_ == hashlib.sha1(password + salt).hexdigest()
@classmethod
def hash_bcrypt(cls, password):
if isinstance(password, unicode):
password = password.encode('utf-8')
hash_ = bcrypt.hashpw(password, bcrypt.gensalt())
return '$'.join(['bcrypt', hash_])
@classmethod
def check_bcrypt(cls, password, hash_):
if isinstance(password, unicode):
password = password.encode('utf-8')
if isinstance(hash_, unicode):
hash_ = hash_.encode('utf-8')
hash_method, hash_ = hash_.split('$', 1)
assert hash_method == 'bcrypt'
return hash_ == bcrypt.hashpw(password, hash_)
@fields.depends('password')
def on_change_password(self):
res = {}
User = Pool().get('res.user')
user = None
value = False
if self.password:
users = User.search([('password_hash', '!=', None)])
if users:
for u in users:
value = self.check_password(self.password, u.password_hash)
if value == True:
res['user'] = u.name
break
if value == False:
self.raise_user_error(u'Contraseña no valida')
return res
class WizardListByProduct(Wizard):
'Wizard List By Product'
__name__ = 'nodux_product_price_list_by_product.update'
start = StateView('nodux_product_price_list_by_product.update.start',
'nodux_product_price_list_by_product.update_list_start_view_form', [
Button('Cancel', 'end', 'tryton-cancel'),
Button('Ok', 'accept', 'tryton-ok', default=True),
])
accept = StateTransition()
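# Adds the selected (included) price lists to every product that does not have them yet,
# recomputing the list prices from cost, margin and customer taxes.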
def transition_accept(self):
pool = Pool()
User = pool.get('res.user')
Product = pool.get('product.template')
Variante = pool.get('product.product')
Taxes1 = pool.get('product.category-customer-account.tax')
Taxes2 = pool.get('product.template-customer-account.tax')
ListByProduct = pool.get('product.list_by_product')
PriceList = pool.get('product.price_list')
priceslists = PriceList.browse(Transaction().context['active_ids'])
pls = PriceList(Transaction().context['active_id'])
user = User(Transaction().user)
incluido = False
lineas = None
def in_group():
pool = Pool()
ModelData = pool.get('ir.model.data')
User = pool.get('res.user')
Group = pool.get('res.group')
origin = str(pls)
user = User(Transaction().user)
group = Group(ModelData.get_id('nodux_product_price_list_by_product',
'group_update_price_force'))
transaction = Transaction()
user_id = transaction.user
if user_id == 0:
user_id = transaction.context.get('user', user_id)
if user_id == 0:
return True
user = User(user_id)
return origin and group in user.groups
if not in_group():
self.raise_user_error("No esta autorizado a agregar las listas de precio en todos los productos")
lineas = []
for pricelist in priceslists:
if pricelist.incluir_lista == False:
pass
elif pricelist.incluir_lista == True:
products = Product.search([('id', '>', 0)])
for p in products:
# reset per-product state so one product's values never leak into the next
incluido = False
iva = Decimal(0.0)
taxes1 = []
taxes2 = []
variantes = Variante.search([('template', '=', p.id)])
for v in variantes:
variante = v
if p.listas_precios:
for listas in p.listas_precios:
if pricelist == listas.lista_precio:
incluido = True
break
if incluido == True:
pass
else:
for line in pricelist.lines:
if line.percentage:
if line.percentage > 0:
percentage = line.percentage/100
if line.use_new_formula == True:
if line.price_list.definir_precio_tarjeta == True:
precio_final = p.list_price / (1 - percentage)
else:
precio_final = p.cost_price / (1 - percentage)
else:
if line.price_list.definir_precio_tarjeta == True:
precio_final = p.list_price * (1 + percentage)
else:
precio_final = p.cost_price * (1 + percentage)
else:
self.raise_user_error('No ha definido el porcentaje, modifique la lista de precio')
if user.company.currency:
precio_final = user.company.currency.round(precio_final)
if p.taxes_category == True:
if p.category.taxes_parent == True:
taxes1= Taxes1.search([('category','=', p.category.parent)])
taxes2 = Taxes2.search([('product','=', p)])
else:
taxes1= Taxes1.search([('category','=', p.category)])
else:
taxes1= Taxes1.search([('category','=', p.category)])
taxes2 = Taxes2.search([('product','=', p)])
if taxes1:
for t in taxes1:
iva = precio_final * t.tax.rate
elif taxes2:
for t in taxes2:
iva = precio_final * t.tax.rate
precio_total = precio_final + iva
if pricelist.definir_precio_venta == True:
p.list_price = precio_final
p.list_price_with_tax = precio_total
p.save()
lineas.append({
'template': p.id,
'lista_precio': pricelist.id,
'fijo' : precio_final,
'fijo_con_iva': precio_total,
'precio_venta': pricelist.definir_precio_venta,
'product': variante.id
})
else:
for line in pricelist.lines:
if line.percentage:
if line.percentage > 0:
percentage = line.percentage/100
if line.use_new_formula == True:
if line.price_list.definir_precio_tarjeta == True:
precio_final = p.list_price / (1 - percentage)
else:
precio_final = p.cost_price / (1 - percentage)
else:
if line.price_list.definir_precio_tarjeta == True:
precio_final = p.list_price * (1 + percentage)
else:
precio_final = p.cost_price * (1 + percentage)
else:
self.raise_user_error('Debe asignar el porcentaje de ganancia en la lista de precio')
if user.company.currency:
precio_final = user.company.currency.round(precio_final)
if p.taxes_category == True:
if p.category.taxes_parent == True:
taxes1= Taxes1.search([('category','=', p.category.parent)])
taxes2 = Taxes2.search([('product','=', p)])
else:
taxes1= Taxes1.search([('category','=', p.category)])
else:
taxes1= Taxes1.search([('category','=', p.category)])
taxes2 = Taxes2.search([('product','=', p)])
if taxes1:
for t in taxes1:
iva = precio_final * t.tax.rate
elif taxes2:
for t in taxes2:
iva = precio_final * t.tax.rate
precio_total = precio_final + iva
if pricelist.definir_precio_venta == True:
p.list_price = precio_final
p.list_price_with_tax = precio_total
p.save()
lineas.append({
'template': p.id,
'lista_precio': pricelist.id,
'fijo' : precio_final,
'fijo_con_iva': precio_total,
'precio_venta': pricelist.definir_precio_venta,
'product' : variante.id
})
listas_precios = ListByProduct.create(lineas)
return 'end'
<file_sep>#This file is part of Tryton. The COPYRIGHT file at the top level of
#this repository contains the full copyright notices and license terms.
from trytond.pool import Pool
from .product import *
from .price_list import *
def register():
Pool.register(
Template,
ListByProduct,
PriceList,
PriceListLine,
UpdateListByProduct,
UpdatePriceListByProduct,
module='nodux_product_price_list_by_product', type_='model')
Pool.register(
WizardListByProduct,
WizardPriceListByProduct,
module='nodux_product_price_list_by_product', type_='wizard')
refs/heads/main | <repo_name>onsissond/Stories<file_sep>/Stories/Core/Services/NotificationService/NotificationProvider.swift
//
// Created by onsissond.
//
import RxSwift
import UserNotifications
private let _storyIdentifierPrefix = "Story Notification"
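/// Closure-based notification service: schedules a local notification for each upcoming
/// story and can remove all previously scheduled story notifications.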
struct NotificationProvider {
var setupNotification: ([Story]) -> Single<SetupNotificationResult>
var removeStoryNotifications: () -> Void
}
extension NotificationProvider {
enum SetupNotificationResult {
case failure
case success
}
}
extension NotificationProvider {
static var live = NotificationProvider(
setupNotification: _setupNotification(),
removeStoryNotifications: { _removeStoryNotifications() }
)
}
extension NotificationProvider {
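// Requests notification authorization and schedules at most `maxNotifications` story
// notifications; the overall result is a success only if every request succeeds.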
private static func _setupNotification(
notificationCenter: UNUserNotificationCenter = .current(),
options: UNAuthorizationOptions = [.alert, .sound],
calendar: Calendar = .current,
maxNotifications: Int = 10
) -> ([Story]) -> Single<SetupNotificationResult> {
return { stories in
.zip(stories.prefix(maxNotifications).map(
_setupStoryNotification(
notificationCenter: notificationCenter,
options: options,
calendar: calendar
)
))
.map { results in
results.allSatisfy { $0 == .success } ? .success : .failure
}
}
}
private static func _setupStoryNotification(
notificationCenter: UNUserNotificationCenter = .current(),
options: UNAuthorizationOptions = [.alert, .sound],
calendar: Calendar = .current
) -> (Story) -> Single<SetupNotificationResult> {
return { story in
.create { subscriber in
notificationCenter.requestAuthorization(
options: options
) { didAllow, _ in
guard didAllow else {
subscriber(.success(.failure))
return
}
notificationCenter.add(.init(
story: story,
calendar: calendar,
identifier: _storyIdentifierPrefix
)) { error in
guard error == nil else {
subscriber(.success(.failure))
return
}
subscriber(.success(.success))
}
}
return Disposables.create()
}
.subscribeOn(SerialDispatchQueueScheduler(qos: .utility))
.observeOn(MainScheduler.instance)
}
}
private static func _removeStoryNotifications(
notificationCenter: UNUserNotificationCenter = .current()
) {
notificationCenter.getPendingNotificationRequests { requests in
let storiesIdentifiers = requests
.map(\.identifier)
.filter { $0.hasPrefix(_storyIdentifierPrefix) }
notificationCenter.removePendingNotificationRequests(
withIdentifiers: storiesIdentifiers
)
}
}
}
private extension UNMutableNotificationContent {
convenience init(story: Story) {
self.init()
title = L10n.Notification.title
subtitle = story.preview.title
body = story.preview.description ?? ""
}
}
private extension UNNotificationRequest {
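/// Builds a calendar-triggered request that fires at 12:00 on the story's publish date.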
convenience init(story: Story, calendar: Calendar, identifier: String) {
let content = UNMutableNotificationContent(story: story)
var triggerDate = calendar.dateComponents(
[.year, .month, .day, .hour, .minute, .second],
from: story.publishDate
)
triggerDate.hour = 12
let trigger = UNCalendarNotificationTrigger(
dateMatching: triggerDate,
repeats: false
)
let identifier = identifier.appending(story.id)
self.init(
identifier: identifier,
content: content,
trigger: trigger
)
}
}
<file_sep>/Stories/UI/StoryContentView/SummaryContentStoryView/SummaryContentStoryView.swift
//
// Created by onsissond.
//
import UIKit
import RxSwift
import RxCocoa
final class SummaryContentStoryView: UIView {
private let _disposeBag = DisposeBag()
private lazy var _dismissButton: UIButton = .makeStoryDismiss()
private lazy var _imageView: UIImageView = {
$0.contentMode = .scaleAspectFill
return $0
}(UIImageView())
private lazy var _stackView: UIStackView = {
$0.axis = .vertical
$0.alignment = .fill
$0.spacing = 8
return $0
}(UIStackView())
private lazy var _headerView = SummaryHeaderStoryView()
private lazy var _flightView = SummaryInfoStoryView()
private lazy var _accommodationView = SummaryInfoStoryView()
private lazy var _nutritionView = SummaryInfoStoryView()
private lazy var _entertainmentView = SummaryInfoStoryView()
private lazy var _summaryFooterStoryView = SummaryFooterStoryView()
private lazy var _buyTicketButton: UIButton = {
$0.setTitle(L10n.SummaryContentStoryView.BuyButton.title, for: .normal)
return $0
}(UIButton())
private lazy var _feedbackButton: UIButton = {
$0.setTitle("Неинтересно", for: .normal)
$0.setTitleColor(.lightGray, for: .normal)
return $0
}(UIButton())
private lazy var _gradientLayer: CAGradientLayer = {
let layer = CAGradientLayer()
layer.startPoint = CGPoint(x: 1, y: 0)
layer.endPoint = CGPoint(x: 1, y: 1)
layer.colors = [
UIColor.clear.cgColor,
UIColor(hex: 0xF1D2F).withAlphaComponent(0.4).cgColor,
UIColor(hex: 0xF1D2F).withAlphaComponent(0.6).cgColor,
UIColor(hex: 0x11834).cgColor
]
layer.locations = [0, 0.08, 0.65, 1]
return layer
}()
private var _viewState: SummaryStoryContent?
init() {
super.init(frame: .zero)
_setupSubviews()
_setupLayout()
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
override func layoutSubviews() {
super.layoutSubviews()
// the gradient is a sublayer, so size it in local coordinates (bounds), not frame
_gradientLayer.frame = bounds
}
private func _setupLayout() {
_imageView.layer.insertSublayer(_gradientLayer, at: 0)
}
private func _setupSubviews() {
addSubview(_imageView)
_imageView.translatesAutoresizingMaskIntoConstraints = false
NSLayoutConstraint.activate([
_imageView.topAnchor.constraint(equalTo: topAnchor),
_imageView.leadingAnchor.constraint(equalTo: leadingAnchor),
_imageView.trailingAnchor.constraint(equalTo: trailingAnchor),
_imageView.bottomAnchor.constraint(equalTo: bottomAnchor)
])
addSubview(_stackView)
if hasEyebrow {
_stackView.topAnchor.constraint(equalTo: topAnchor, constant: 80).isActive = true
} else {
_stackView.topAnchor.constraint(equalTo: topAnchor, constant: 62).isActive = true
}
_stackView.leadingAnchor.constraint(equalTo: leadingAnchor, constant: 16).isActive = true
_stackView.trailingAnchor.constraint(equalTo: trailingAnchor, constant: -16).isActive = true
if hasEyebrow {
_stackView.bottomAnchor.constraint(equalTo: bottomAnchor, constant: -32).isActive = true
} else {
_stackView.bottomAnchor.constraint(equalTo: bottomAnchor, constant: -16).isActive = true
}
_stackView.translatesAutoresizingMaskIntoConstraints = false
addSubview(_dismissButton)
_dismissButton.translatesAutoresizingMaskIntoConstraints = false
NSLayoutConstraint.activate([
_dismissButton.topAnchor.constraint(equalTo: _stackView.topAnchor),
_dismissButton.trailingAnchor.constraint(equalTo: trailingAnchor, constant: -16)
])
_stackView.addArrangedSubview(_headerView)
_stackView.addArrangedSubview(_flightView)
_stackView.addArrangedSubview(_accommodationView)
_stackView.addArrangedSubview(_nutritionView)
_stackView.addArrangedSubview(_entertainmentView)
_stackView.addArrangedSubview(UIView())
_stackView.addArrangedSubview(_summaryFooterStoryView)
_stackView.addArrangedSubview(_buyTicketButton)
_stackView.addArrangedSubview(_feedbackButton)
}
}
extension SummaryContentStoryView: StoryContentView {
func render(viewState: StoryContent) -> Bool {
guard case .summary(let viewState) = viewState else {
return false
}
_viewState = viewState
_imageView.kf.setImage(
with: viewState.image.imageURL,
options: [.transition(.fade(0.5))]
)
_headerView.render(viewState: .init(
period: viewState.period.value,
title: viewState.title
))
_flightView.render(viewState: .trip(
from: viewState.trip.from,
to: viewState.trip.to
))
_accommodationView.render(viewState: .accommodation(
title: viewState.accommodation.title,
description: viewState.accommodation.description
))
_nutritionView.render(viewState: .nutrition(
title: viewState.nutrition.title,
description: viewState.nutrition.description
))
_entertainmentView.render(viewState: .entertainment(
title: viewState.entertainment.title,
description: viewState.entertainment.description
))
_summaryFooterStoryView.render(viewState: .init(
title: viewState.price.title,
subtitle: viewState.price.subtitle,
price: viewState.price.value
))
return true
}
}
extension SummaryContentStoryView {
enum Event {
case dismiss
case openDeeplink(URL)
case requestFeedback
}
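/// Merges the dismiss, buy (deeplink) and "not interesting" feedback button taps into a single event stream.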
var events: ControlEvent<Event> {
ControlEvent(events: Observable.merge([
_dismissButton.rx.controlEvent(.touchUpInside).map { _ in .dismiss },
_buyTicketButton.rx.controlEvent(.touchUpInside).map { [weak self] in
self?._viewState?.deeplink
}
.filterNil()
.map(Event.openDeeplink),
_feedbackButton.rx.controlEvent(.touchUpInside).map { _ in .requestFeedback }
]))
}
}
<file_sep>/Stories/UI/StoriesViewController/SwitchableView.swift
//
// Created by onsissond.
//
import UIKit
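/// Overlays several story content views and, on render, shows only the one that can
/// display the given `StoryContent`, hiding the others.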
class SwitchableView: UIView {
private var _contentViews: [StoryContentView]
init(contentViews: [StoryContentView]) {
_contentViews = contentViews
super.init(frame: .zero)
_setupSubviews()
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
private func _setupSubviews() {
_contentViews.forEach { view in
addSubview(view)
view.topAnchor.constraint(equalTo: topAnchor).isActive = true
view.leadingAnchor.constraint(equalTo: leadingAnchor).isActive = true
view.trailingAnchor.constraint(equalTo: trailingAnchor).isActive = true
view.bottomAnchor.constraint(equalTo: bottomAnchor).isActive = true
view.translatesAutoresizingMaskIntoConstraints = false
}
}
}
extension SwitchableView: StoryContentView {
func render(viewState: StoryContent) -> Bool {
_contentViews.forEach {
$0.isHidden = !$0.render(viewState: viewState)
}
return true
}
}
<file_sep>/Stories/Core/Domain/StoryContent.swift
//
// Created by onsissond.
//
import UIKit
enum StoryContent: Equatable {
case regular(RegularStoryContent)
case summary(SummaryStoryContent)
}
struct ImageStoryContent: Equatable {
var imageURL: URL
}
struct RegularStoryContent: Equatable {
struct Info: Equatable {
var title: String
var subtitle: String
}
var image: ImageStoryContent
var title: String = ""
var description: String = ""
var periodInfo: Info?
var priceInfo: Info?
}
struct SummaryStoryContent: Equatable {
var image: ImageStoryContent
var period: Period
var title: String = ""
var price: Price
var deeplink: URL
var trip: Trip
var accommodation: Attachment
var nutrition: Attachment
var entertainment: Attachment
}
extension SummaryStoryContent {
struct Period: Equatable {
var title: String
var value: String
}
struct Price: Equatable {
var title: String
var subtitle: String
var value: String
}
struct Trip: Equatable {
var to: TripInfo
var from: TripInfo
}
struct Attachment: Equatable {
var title: String
var description: String
}
struct TripInfo: Equatable {
var transport: Transport
var title: String
var subtitle: String
}
}
// MARK: - Decodable
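// The payload carries a "type" discriminator; decoding dispatches to the matching
// content case (regular or summary).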
extension StoryContent: Decodable {
enum CodingKeys: String, CodingKey {
case type
}
enum ContentType: String, Decodable {
case regular
case summary
}
init(from decoder: Decoder) throws {
let container = try decoder.container(keyedBy: CodingKeys.self)
let type = try container.decode(ContentType.self, forKey: .type)
switch type {
case .regular:
self = .regular(try RegularStoryContent(from: decoder))
case .summary:
self = .summary(try SummaryStoryContent(from: decoder))
}
}
}
extension RegularStoryContent: Decodable {
enum CodingKeys: String, CodingKey {
case title
case text
case price
case period
}
init(from decoder: Decoder) throws {
let container = try decoder.container(keyedBy: CodingKeys.self)
image = try ImageStoryContent(from: decoder)
title = try container.decode(String.self, forKey: .title)
description = try container.decode(String.self, forKey: .text)
priceInfo = try container.decodeIfPresent(RegularStoryContent.Info.self, forKey: .price)
periodInfo = try container.decodeIfPresent(RegularStoryContent.Info.self, forKey: .period)
}
}
extension RegularStoryContent.Info: Decodable {
enum CodingKeys: String, CodingKey {
case title
case subtitle = "value"
}
}
extension SummaryStoryContent: Decodable {
enum CodingKeys: String, CodingKey {
case title
case period
case price
case trip
case deeplink
case accommodation
case nutrition
case entertainment
}
init(from decoder: Decoder) throws {
let container = try decoder.container(keyedBy: CodingKeys.self)
image = try ImageStoryContent(from: decoder)
title = try container.decode(String.self, forKey: .title)
price = try container.decode(Price.self, forKey: .price)
period = try container.decode(Period.self, forKey: .period)
deeplink = try container.decode(URL.self, forKey: .deeplink)
trip = try container.decode(Trip.self, forKey: .trip)
accommodation = try container.decode(Attachment.self, forKey: .accommodation)
nutrition = try container.decode(Attachment.self, forKey: .nutrition)
entertainment = try container.decode(Attachment.self, forKey: .entertainment)
}
}
extension SummaryStoryContent.Period: Decodable {}
extension SummaryStoryContent.Price: Decodable {}
extension SummaryStoryContent.Trip: Decodable {}
extension SummaryStoryContent.TripInfo: Decodable {
enum CodingKeys: String, CodingKey {
case type
case title
case description
}
init(from decoder: Decoder) throws {
let container = try decoder.container(keyedBy: CodingKeys.self)
transport = try container.decode(Transport.self, forKey: .type)
title = try container.decode(String.self, forKey: .title)
subtitle = try container.decode(String.self, forKey: .description)
}
}
extension SummaryStoryContent.Attachment: Decodable {}
extension ImageStoryContent: Decodable {
enum CodingKeys: String, CodingKey {
case image
}
init(from decoder: Decoder) throws {
let container = try decoder.container(keyedBy: CodingKeys.self)
imageURL = try container.decode(URL.self, forKey: .image)
}
}
<file_sep>/Stories/UI/FeedbackViewController/WKWebViewController.swift
//
// Created by onsissond.
//
import RxSwift
import WebKit
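/// Minimal in-app web view: shows an activity indicator until the page finishes loading.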
class WKWebViewController: UIViewController {
private var _webViewNavigationHandler: ((WKNavigationActionPolicy) -> Void)?
private lazy var _webView: WKWebView = {
let webView = WKWebView(frame: .zero)
webView.navigationDelegate = self
return webView
}()
private lazy var _indicator: UIActivityIndicatorView = {
let indicator = UIActivityIndicatorView()
indicator.hidesWhenStopped = true
indicator.startAnimating()
return indicator
}()
override func viewDidLoad() {
super.viewDidLoad()
_addSubviews()
}
private func _addSubviews() {
_webView.translatesAutoresizingMaskIntoConstraints = false
view.addSubview(_webView)
NSLayoutConstraint.activate([
_webView.trailingAnchor.constraint(equalTo: view.trailingAnchor),
_webView.topAnchor.constraint(equalTo: view.topAnchor),
_webView.leadingAnchor.constraint(equalTo: view.leadingAnchor),
_webView.bottomAnchor.constraint(equalTo: view.bottomAnchor)
])
_indicator.translatesAutoresizingMaskIntoConstraints = false
view.addSubview(_indicator)
NSLayoutConstraint.activate([
_indicator.centerYAnchor.constraint(equalTo: view.centerYAnchor),
_indicator.centerXAnchor.constraint(equalTo: view.centerXAnchor)
])
}
}
extension WKWebViewController: WKNavigationDelegate {
func webView(_ webView: WKWebView, didFinish navigation: WKNavigation!) {
_indicator.stopAnimating()
}
}
extension WKWebViewController {
func render(viewState: URL) {
_webView.load(.init(url: viewState))
}
}
<file_sep>/Stories/UI/StoriesViewController/StoriesViewController.swift
//
// Created by onsissond.
//
import UIKit
import RxSwift
import ComposableArchitecture
enum StoriesCellIdentifier: String {
case regular
}
class StoriesViewController: UIViewController {
private let _store: StoriesSystem.LocalStore
private let _disposeBag = DisposeBag()
private lazy var _viewStore = ViewStore(_store)
private var _viewTranslation = CGPoint.zero
private lazy var collectionView: UICollectionView = {
$0.bounces = false
$0.isScrollEnabled = false
$0.isPagingEnabled = true
$0.showsHorizontalScrollIndicator = false
$0.register(
StoryCell.self,
forCellWithReuseIdentifier: StoriesCellIdentifier.regular.rawValue
)
$0.delegate = self
$0.dataSource = self
return $0
}(UICollectionView(frame: .zero, collectionViewLayout: .cube))
init(store: StoriesSystem.LocalStore) {
_store = store
super.init(nibName: nil, bundle: nil)
modalPresentationStyle = .overFullScreen
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
override func viewDidLoad() {
super.viewDidLoad()
_setupGesturesRecognizers()
_setupSubviews()
_setupSubscriptions()
_startCurrentStory()
}
private func _startCurrentStory() {
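// Defer briefly so the collection view has laid out before scrolling to the persisted
// story index and starting playback.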
DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) {
if self._store.state.currentStory != 0 {
self.collectionView.selectItem(
at: .init(row: self._store.state.currentStory, section: 0),
animated: false,
scrollPosition: .centeredHorizontally
)
}
self._viewStore.send(.storyAction(
storyIndex: self._viewStore.state.currentStory,
action: .run
))
}
}
private func _setupSubscriptions() {
_viewStore.publisher.map(\.currentStory)
.distinctUntilChanged()
.bind(onNext: { [weak self] currentStory in
self?.collectionView.selectItem(
at: IndexPath(row: currentStory, section: 0),
animated: true,
scrollPosition: .centeredHorizontally
)
})
.disposed(by: _disposeBag)
_viewStore.publisher.map(\.feedbackAlert)
.distinctUntilChanged()
.filterNil()
.bind(onNext: { [weak self] in
guard let self = self else { return }
self.present(
AlertBuilder()
.with(alertState: $0, sendAction: self._viewStore.send)
.build(),
animated: true
)
})
.disposed(by: _disposeBag)
_viewStore.publisher.map(\.feedbackURL)
.distinctUntilChanged()
.filterNil()
.bind(onNext: { [weak self] in
let webView = WKWebViewController()
webView.presentationController?.delegate = self
webView.render(viewState: $0)
self?.present(webView, animated: true, completion: {
self?._viewStore.send(.launchedFeedback)
})
})
.disposed(by: _disposeBag)
}
private func _setupSubviews() {
view.addSubview(collectionView)
collectionView.translatesAutoresizingMaskIntoConstraints = false
NSLayoutConstraint.activate([
collectionView.topAnchor.constraint(equalTo: view.topAnchor),
collectionView.leadingAnchor.constraint(equalTo: view.leadingAnchor),
collectionView.trailingAnchor.constraint(equalTo: view.trailingAnchor),
collectionView.bottomAnchor.constraint(equalTo: view.bottomAnchor)
])
}
}
extension StoriesViewController {
private func _setupGesturesRecognizers() {
let longPressRecognizer = UILongPressGestureRecognizer(
target: self, action: #selector(longPressed)
)
longPressRecognizer.minimumPressDuration = 0.2
view.addGestureRecognizer(longPressRecognizer)
view.addGestureRecognizer(UIPanGestureRecognizer(
target: self, action: #selector(handleDismiss)
))
}
@objc private func longPressed(sender: UILongPressGestureRecognizer) {
guard let indexPath = collectionView.indexPathsForVisibleItems.first else {
return
}
if sender.state == .began {
_viewStore.send(.storyAction(storyIndex: indexPath.row, action: .pause))
} else if sender.state == .ended {
_viewStore.send(.storyAction(storyIndex: indexPath.row, action: .continue))
}
}
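// Dragging down pauses the current story and moves the view with the finger; releasing before
// 200 pt of downward travel springs back and resumes, while releasing beyond it sends the dismiss action.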
@objc func handleDismiss(sender: UIPanGestureRecognizer) {
switch sender.state {
case .changed:
_viewTranslation = sender.translation(in: view)
if _viewTranslation.y < 0 { break }
_viewStore.send(
.storyAction(storyIndex: _viewStore.currentStory, action: .pause)
)
UIView.animate(
withDuration: 0.5,
delay: 0,
usingSpringWithDamping: 0.7,
initialSpringVelocity: 1,
options: .curveEaseOut,
animations: {
self.view.transform = CGAffineTransform(
translationX: 0,
y: self._viewTranslation.y
)
})
case .ended:
if _viewTranslation.y < 200 {
UIView.animate(
withDuration: 0.5,
delay: 0,
options: .curveEaseOut,
animations: { [weak self] in
self?.view.transform = .identity
},
completion: { [weak self] _ in
guard let self = self else { return }
self._viewStore.send(.storyAction(
storyIndex: self._viewStore.currentStory,
action: .continue
))
})
} else {
_viewStore.send(.storyAction(
storyIndex: _viewStore.currentStory,
action: .dismiss
))
}
default:
break
}
}
}
extension StoriesViewController: UICollectionViewDataSource {
func numberOfSections(in collectionView: UICollectionView) -> Int {
1
}
func collectionView(
_ collectionView: UICollectionView,
numberOfItemsInSection section: Int
) -> Int {
_store.state.stories.count
}
}
extension StoriesViewController: UICollectionViewDelegateFlowLayout {
func collectionView(
_ collectionView: UICollectionView,
cellForItemAt indexPath: IndexPath
) -> UICollectionViewCell {
let cell = collectionView.dequeueReusableCell(
withReuseIdentifier: StoriesCellIdentifier.regular.rawValue,
for: indexPath
)
if let cell = cell as? StoryCell {
cell.render(store: _store.scope(
state: \.stories[indexPath.row],
action: { .storyAction(storyIndex: indexPath.row, action: $0) }
))
}
return cell
}
func collectionView(
_ collectionView: UICollectionView,
layout collectionViewLayout: UICollectionViewLayout,
sizeForItemAt indexPath: IndexPath
) -> CGSize {
view.bounds.size
}
func collectionView(
_ collectionView: UICollectionView,
layout collectionViewLayout: UICollectionViewLayout,
insetForSectionAt section: Int
) -> UIEdgeInsets {
.zero
}
func collectionView(
_ collectionView: UICollectionView,
layout collectionViewLayout: UICollectionViewLayout,
minimumLineSpacingForSectionAt section: Int
) -> CGFloat {
0
}
func collectionView(
_ collectionView: UICollectionView,
layout collectionViewLayout: UICollectionViewLayout,
minimumInteritemSpacingForSectionAt section: Int
) -> CGFloat {
0
}
}
extension StoriesViewController: UIAdaptivePresentationControllerDelegate {
func presentationControllerDidDismiss(
_ presentationController: UIPresentationController
) {
_viewStore.send(.storyAction(
storyIndex: _viewStore.state.currentStory,
action: .continue
))
}
}
extension AlertState where Action == StoriesSystem.Action {
static var feedback = AlertState<StoriesSystem.Action>(
title: L10n.Alert.Feedback.title,
message: L10n.Alert.Feedback.message,
primaryButton: .cancel(
L10n.Alert.Feedback.Button.cancel,
send: .dismissFeedbackAlert
),
secondaryButton: .default(
L10n.Alert.Feedback.Button.ok,
send: .launchFeedback
)
)
}
<file_sep>/Stories/Core/API/FetchStoriesRequest.swift
//
// Created by onsissond.
//
import Moya
struct FetchStoriesRequest: TargetType {
var baseURL: URL {
URL(string: "http://fakeurl/")!
}
var path: String {
"v1/stories/config/"
}
var method: Moya.Method {
.get
}
var task: Task {
.requestPlain
}
var headers: [String: String]? {
nil
}
var sampleData: Data {
"""
[
{
"id": "1",
"title": "Сочи",
"subtitle": "Неделя за 50 000 ₽ на одного",
"thumb": "https://firebasestorage.googleapis.com/v0/b/mobile-transport-app.appspot.com/o/story%2Fsochi%2F1.jpg?alt=media&token=<PASSWORD>",
"publishDate": "2021-05-21",
"expireDate": "2021-05-23",
"pages": [
{
"type": "regular",
"image": "https://firebasestorage.googleapis.com/v0/b/mobile-transport-app.appspot.com/o/story%2Fsochi%2F1.jpg?alt=media",
"title": "Погода",
"text": "Сухая солнечная погода обычно устанавливается в Сочи к середине апреля. Температура держится в районе 15 градусов тепла, но возможны пасмурные и дождливые дни, так что ветровка и непромокаемая обувь нужны обязательно."
},
{
"type": "regular",
"image": "https://firebasestorage.googleapis.com/v0/b/mobile-transport-app.appspot.com/o/story%2Fsochi%2F2.jpg?alt=media",
"title": "Парк «Дендрарий»",
"text": "Коллекция знаменитого сочинского дендрария насчитывает около 1800 деревьев, цветов и кустарников. Уделите прогулке по парку не менее трёх часов. Взрослый билет стоит 250 ₽, детский — 120 ₽; экскурсия на электромобиле — 150 ₽ с человека; билет на канатку между верхней и нижней частями парка — 350 ₽ взрослый и 200 ₽ детский."
},
{
"type": "regular",
"image": "https://firebasestorage.googleapis.com/v0/b/mobile-transport-app.appspot.com/o/story%2Fsochi%2F3.jpg?alt=media",
"title": "Морской вокзал и Зимний театр",
"text": "Главные архитектурные символы Сочи и идеальные образцы сталинского ампира. Прогулка от Морпорта до театра по улице Орджоникидзе займёт полчаса. По пути загляните в легендарную хинкальную «Белые ночи» (средник чек на человека — 1000 ₽ с напитком)."
},
{
"type": "regular",
"image": "https://firebasestorage.googleapis.com/v0/b/mobile-transport-app.appspot.com/o/story%2Fsochi%2F4.jpg?alt=media",
"title": "Тисо-самшитовая роща",
"text": "Для прогулки на природе отлично подойдёт тисо-самшитовая роща — небольшой реликтовый лес с оборудованной тропой. Лучше всего зайти с главного входа и закончить каньоном «Чёртовы ворота», где можно искупаться и пообедать на базе отдыха. По пути увидите развалины Хостинской крепости VIII века. Прогулка по роще взрослым обойдётся в 300 ₽, детям до 14 лет — 150 ₽, до 7 лет — бесплатно."
},
{
"type": "regular",
"image": "https://firebasestorage.googleapis.com/v0/b/mobile-transport-app.appspot.com/o/story%2Fsochi%2F5.jpg?alt=media",
"title": "Где жить",
"text": "От отеля Allure of the Seа (от 4000 ₽/ночь) удобно добираться до тисо-самшитовой рощи, агурских водопадов, Орлиных скал, горы Ахун. Заказать экскурсию можно в отеле. Есть детский клуб, пинг-понг, бильярд, бассейн, столовая и буфет. До пляжа — 400 метров по тенистой аллее.\\n\\nБолее бюджетный вариант — GRACE O’DIN (от 1900 ₽/ночь) на Северной в пяти минутах от вокзала. До основных достопримечательностей отсюда можно добраться пешком."
},
{
"type": "summary",
"image": "https://firebasestorage.googleapis.com/v0/b/mobile-transport-app.appspot.com/o/story%2Fsochi%2F6.jpg?alt=media",
"title": "<NAME>",
"price": {
"title": "Бюджет поездки:",
"subtitle": "в расчете на одного человека",
"value": "50 000 ₽"
},
"period": {
"title": "Когда",
"value": "27 мар - 1 апр"
},
"deeplink": "https://www.google.com",
"trip": {
"to": {
"type": "avia",
"title": "Уральские авиалинии от 4 156 Р",
"description": "туда 27 марта в 20:00"
},
"from": {
"type": "avia",
"title": "Уральские авиалинии от 5 000 Р",
"description": "обратно 1 апреля в 13:40"
}
},
"accommodation": {
"title": "11 000–24 000 ₽",
"description": "Неделя в двухместном номере с завтраком"
},
"nutrition": {
"title": "1000 ₽",
"description": "Средний чек (с напитком) в популярных недорогих кафе"
},
"entertainment": {
"title": "от 10 000 ₽",
"description": "Экскурсии, прогулки, развлечения, сувениры"
}
}
]
},
{
"id": "2",
"title": "Тула",
"subtitle": "Выходные за 30 000 ₽ на одного",
"thumb": "https://firebasestorage.googleapis.com/v0/b/mobile-transport-app.appspot.com/o/story%2Ftula%2F1.jpg?alt=media",
"publishDate": "2021-03-21",
"expireDate": "2021-03-29",
"pages": [
{
"type": "regular",
"image": "https://firebasestorage.googleapis.com/v0/b/mobile-transport-app.appspot.com/o/story%2Ftula%2F1.jpg?alt=media",
"title": "Погода",
"text": "В апреле в Туле бывает дождливо и пасмурно, температура выше 10 градусов поднимется только к концу месяца, так что прогулкам на свежем воздухе лучше предпочесть осмотр музеев и ужины в модных ресторанах."
},
{
"type": "regular",
"image": "https://firebasestorage.googleapis.com/v0/b/mobile-transport-app.appspot.com/o/story%2Ftula%2F2.jpg?alt=media",
"title": "Тульский Кремль",
"text": "Крепость с 500-летней историей: здесь в Смутное время бояре присягали на верность Лжедмитрию I. В одной из крепостных стен есть лавки с тульскими пряниками, белёвской пастилой, суворовскими конфетами и, конечно, самоварами. Взрослый билет в Кремль стоит 250 ₽."
},
{
"type": "regular",
"image": "https://firebasestorage.googleapis.com/v0/b/mobile-transport-app.appspot.com/o/story%2Ftula%2F3.jpg?alt=media",
"title": "Креативное пространство «Искра»",
"text": "Здесь есть кофейни и бары, дизайнерские магазинчики, барбершопы, чайные, коворкинг, хостел. В рюмочной Lil Pil на территории «Искры» подают настойки, зерновые дистилляты и креплёное вино. Ещё одна интересная локация— мясной ресторан пивоварни Salden’s, которая поставляет пиво по всей России (средний чек — 1500 ₽ на человека без алкоголя). "
},
{
"type": "regular",
"image": "https://firebasestorage.googleapis.com/v0/b/mobile-transport-app.appspot.com/o/story%2Ftula%2F4.jpg?alt=media",
"title": "Поленово",
"text": "Усадьба <NAME>, где хранятся его работы и личные вещи. В доме вы увидите эскиз знаменитой картины «Христос и грешница», а спустившись к Оке — меланхоличные пейзажи, которыми вдохновлялся художник. Музей находится в часе езды от Тулы, взрослый билет с экскурсией стоит 350 ₽, детский — 150 ₽."
},
{
"type": "regular",
"image": "https://firebasestorage.googleapis.com/v0/b/mobile-transport-app.appspot.com/o/story%2Ftula%2F5.jpg?alt=media",
"title": "Где жить",
"text": "Курортный спа-отель Grumant (от 4500 ₽/ночь с завтраком) находится рядом со знаменитой усадьбой Ясная Поляна, родовым имением Льва Толстого. В отеле есть бассейн, сауна и хаммам, боулинг; до центра Тулы — полчаса езды.\\n\\nВариант в той же ценовой категории, но в центре города — бутик-отель 11 Hotel & Garden. В 10 минутах ходьбы от отеля расположен «Ликёрка лофт» — ещё одно модное городское пространство с граффити, ресторанчиками и инстаграмными видами."
},
{
"type": "summary",
"image": "https://firebasestorage.googleapis.com/v0/b/mobile-transport-app.appspot.com/o/story%2Ftula%2F6.jpg?alt=media",
"title": "Выходные в Туле",
"price": {
"title": "Бюджет поездки:",
"subtitle": "в расчете на одного человека",
"value": "30 000 ₽"
},
"period": {
"title": "Когда",
"value": "27-28 марта"
},
"deeplink": "https://www.google.com",
"trip": {
"to": {
"type": "train",
"title": "Ласточка от 1 171 ₽",
"description": "туда 27 марта в 08:47"
},
"from": {
"type": "train",
"title": "Ласточка-премиум от 1 220 ₽",
"description": "обратно 28 марта в 20:39"
}
},
"accommodation": {
"title": "13 500 ₽",
"description": "Три ночи в двухместном номере с завтраком"
},
"nutrition": {
"title": "2000–2500 ₽",
"description": "Средний чек (с алкоголем) в модных ресторанах города и пригорода"
},
"entertainment": {
"title": "до 5000 ₽",
"description": "Экскурсии, развлечения, шопинг и сувениры"
}
}
]
},
{
"id": "3",
"title": "Казань",
"subtitle": "Царство янтаря на берегах бодрящего Балтийского моря",
"thumb": "https://picsum.photos/id/10/200/300",
"publishDate": "2021-03-27",
"expireDate": "2021-04-02",
"pages": [
{
"type": "regular",
"image": "https://picsum.photos/id/10/720/1280",
"title": "Весна в Калинграде",
"text": "Суровое море, прекрасная прибалтийская природа и насыщенная история региона: каждая эпоха нашла в городе и окрестностях свое отражение.",
"price": {
"title": "Бюджет",
"value": "от 55000 Р"
},
"period": {
"title": "Когда",
"value": "27 мар - 1 апр"
}
},
{
"type": "regular",
"image": "https://picsum.photos/id/1000/720/1280",
"title": "Остров Канта",
"text": "Остров Канта, именовавшийся ранее островом Кнайпхоф, занимает место посреди реки Преголи. В лучшие свои дни город Кнайпхоф был центром судоходства и торговли, активно застраивался, располагал десятками дорог и сотнями домов и соединялся с материком пятью мостами.",
"price": {
"title": "Бюджет",
"value": "55000"
}
},
{
"type": "regular",
"image": "https://picsum.photos/id/900/720/1280",
"title": "Форт «Дёнхофф»",
"text": "Если вам по душе архитектурный стиль Кафедрального собора, обязательно посетите хотя бы несколько городских ворот и фортов. При наличии свободного времени прогуляйтесь по маршруту бывшего оборонительного кольца Кёнигсберга"
},
{
"type": "summary",
"image": "https://picsum.photos/id/1019/720/1280",
"title": "Весна в Калинграде",
"price": {
"title": "Бюджет поездки:",
"subtitle": "в расчете на одного человека",
"value": "55 000 ₽"
},
"period": {
"title": "Когда",
"value": "27 мар - 1 апр"
},
"deeplink": "https://www.google.com",
"trip": {
"to": {
"type": "avia",
"title": "Аэрофлот от 5 000 Р",
"description": "обратно 28 марта в 18:30"
},
"from": {
"type": "avia",
"title": "Аэрофлот от 5 000 Р",
"description": "обратно 28 марта в 18:30"
}
},
"accommodation": {
"title": "18000",
"description": "7 ночей, двухместный номер с завтраком, исторический центр города"
},
"nutrition": {
"title": "1000",
"description": "Средний чек в лучших ресторанах города"
},
"entertainment": {
"title": "2000",
"description": "Прочие расходы на развлечения, билеты и экскурсии"
}
}
]
}
]
""".data(using: .utf8)!
}
}
<file_sep>/StoriesAppUITests/StoriesAppUITests.swift
//
// Created by onsissond.
//
import XCTest
class StoriesAppUITests: XCTestCase {
func testExample() {}
}
<file_sep>/Stories/UI/StoriesViewController/AlertBuilder+.swift
//
// Created by onsissond.
//
import ComposableArchitecture
extension AlertBuilder {
func with<T>(
alertState: ComposableArchitecture.AlertState<T>,
sendAction: @escaping (T) -> Void
) -> AlertBuilder {
var builder = with(title: alertState.title)
if let message = alertState.message {
builder = builder.with(message: message)
}
[alertState.primaryButton, alertState.secondaryButton].forEach {
switch $0 {
case .some(let button):
switch button.type {
case .cancel(let title):
guard let title = title else { break }
builder = builder.withCancel(title: title, action: {
guard let action = button.action else { return }
sendAction(action)
})
case .default(let title):
builder = builder.withAction(title: title, isPreffered: true) {
guard let action = button.action else { return }
sendAction(action)
}
case .destructive(let title):
builder = builder.withDestructive(title: title, action: {
guard let action = button.action else { return }
sendAction(action)
})
}
case .none:
break
}
}
return builder
}
}
<file_sep>/Stories/Core/Services/StoriesService.swift
//
// Created by onsissond.
//
import RxSwift
struct Stories: Equatable {
var activeStories: [Story]
var futureStories: [Story]
}
struct StoriesService {
typealias FetchStories = (_ date: Date) -> Single<Stories>
var fetchStories: (HTTPClient) -> FetchStories
}
extension StoriesService {
static var live = StoriesService(
fetchStories: _fetchStories
)
}
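// Usage sketch (illustrative only): resolve the curried HTTPClient dependency,
// then fetch and split stories relative to a reference date.
//
//     let fetch = StoriesService.live.fetchStories(httpClient)
//     _ = fetch(Date())
//         .subscribe(onSuccess: { print($0.activeStories.count, $0.futureStories.count) })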
extension StoriesService {
private static var _fetchStories: (HTTPClient) -> FetchStories = { httpClient in
return { currentDate in
httpClient.fetchStories
.observeOn(SerialDispatchQueueScheduler(qos: .userInteractive))
.map { stories in
stories.sorted { $0.publishDate < $1.publishDate }
}
.map { stories in
Stories(
activeStories: stories.filter {
$0.publishDate < currentDate &&
$0.expireDate > currentDate
},
futureStories: stories.filter {
$0.publishDate > currentDate
}
)
}
.observeOn(MainScheduler.instance)
}
}
}
<file_sep>/Stories/UI/StoriesViewController/InstaProgressView/InstaProgressView.swift
//
// InstaProgressView.swift
// InstaProgressView
//
// Created by <NAME> on 11/19/20.
//
import UIKit
public class InstaProgressView: UIStackView {
private let _duration: TimeInterval
private let _progressTintColor: UIColor
private let _trackTintColor: UIColor
weak var delegate: InstaProgressViewDelegate?
public init(
progressTintColor: UIColor,
trackTintColor: UIColor,
spaceBetweenSegments: CGFloat,
duration: TimeInterval
) {
self._duration = duration
self._progressTintColor = progressTintColor
self._trackTintColor = trackTintColor
super.init(frame: CGRect.zero)
spacing = spaceBetweenSegments
_setupAppearence()
}
required init(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
private func _setupAppearence() {
axis = .horizontal
distribution = .fillEqually
alignment = .fill
}
private func _createProgressView(
_ progressTintColor: UIColor,
_ trackTintColor: UIColor
) -> AnimatableProgressView {
AnimatableProgressView(
duration: _duration,
progressTintColor: progressTintColor,
trackTintColor: trackTintColor
)
}
}
extension InstaProgressView {
struct ViewState: Equatable {
var pagesCount: Int
var progress: [AnimatorState]
var currentPage: Int
}
enum AnimatorState: Equatable {
case initial
case `continue`
case pause
case stop
case start(UUID)
case finish
}
func render(viewState: ViewState) {
if viewState.pagesCount != arrangedSubviews.count {
arrangedSubviews.forEach { $0.removeFromSuperview() }
(0..<viewState.pagesCount)
.map { _ in _createProgressView(_progressTintColor, _trackTintColor) }
.forEach(addArrangedSubview)
}
zip(
viewState.progress,
arrangedSubviews.compactMap({ $0 as? AnimatableProgressView })
).forEach { animatorState, progressView in
switch animatorState {
case .initial:
progressView.render(viewState: .initial)
case .pause:
progressView.render(viewState: .pause)
case .continue:
progressView.render(viewState: .continue)
case .stop:
progressView.render(viewState: .stop)
case .start:
progressView.render(viewState: .run {
self.delegate?.next()
})
case .finish:
progressView.render(viewState: .finish)
}
}
}
private func _progressView(index: Int) -> AnimatableProgressView {
arrangedSubviews.compactMap({ $0 as? AnimatableProgressView })[index]
}
}
class AnimatableProgressView: UIProgressView {
private var _animator: UIViewPropertyAnimator
private var _viewState: ViewState?
init(
duration: TimeInterval,
progressTintColor: UIColor,
trackTintColor: UIColor
) {
_animator = UIViewPropertyAnimator(
duration: duration,
curve: .easeInOut
)
super.init(frame: .zero)
self.progressTintColor = progressTintColor
self.trackTintColor = trackTintColor
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
}
extension AnimatableProgressView {
enum ViewState {
case run(() -> Void)
case stop
case pause
case `continue`
case finish
case initial
}
func render(viewState: ViewState) {
_viewState = viewState
switch viewState {
case .initial:
_animator.stopAnimation(true)
progress = 0
case .run(let completion):
_animator.stopAnimation(true)
progress = 0.01
DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) { [weak self] in
guard let self = self,
case .run = self._viewState else { return }
self._animator.addAnimations {
self.setProgress(1, animated: true)
}
self._animator.addCompletion { _ in
completion()
}
self._animator.startAnimation()
}
case .stop:
_animator.stopAnimation(true)
case .pause:
_animator.pauseAnimation()
case .continue:
_animator.continueAnimation(withTimingParameters: nil, durationFactor: 0)
case .finish:
_animator.stopAnimation(true)
progress = 1
}
}
}
<file_sep>/Stories/Core/Services/NotificationService/NotificationSettingsStorage.swift
//
// Created by onsissond.
//
private let subscriptionKey = "Stories Subsciption"
struct NotificationSettingsStorage {
var loadSubscriptionStatus: () -> StoriesSubscriptionStatus?
var saveSubscriptionStatus: (StoriesSubscriptionStatus) -> Void
}
extension NotificationSettingsStorage {
enum StoriesSubscriptionStatus: String {
case on, off, failure
}
}
extension NotificationSettingsStorage {
static var live = NotificationSettingsStorage(
loadSubscriptionStatus: _loadStoriesSubscriptionStatus(),
saveSubscriptionStatus: _saveStoriesSubscriptionStatus()
)
}
extension NotificationSettingsStorage {
private static func _loadStoriesSubscriptionStatus(
userDefaults: UserDefaults = .standard
) -> () -> StoriesSubscriptionStatus? {
return {
userDefaults.string(forKey: subscriptionKey)
.flatMap { StoriesSubscriptionStatus(rawValue: $0) }
}
}
private static func _saveStoriesSubscriptionStatus(
userDefaults: UserDefaults = .standard
) -> (StoriesSubscriptionStatus) -> Void {
return { status in
userDefaults.set(
status.rawValue,
forKey: subscriptionKey
)
}
}
}
<file_sep>/Stories/UI/Resources/Strings/Strings.swift
// swiftlint:disable all
// Generated using SwiftGen — https://github.com/SwiftGen/SwiftGen
import Foundation
// swiftlint:disable superfluous_disable_command
// swiftlint:disable file_length
// MARK: - Strings
// swiftlint:disable explicit_type_interface function_parameter_count identifier_name line_length
// swiftlint:disable nesting type_body_length type_name
internal enum L10n {
internal enum Alert {
internal enum Feedback {
/// Помогите улучшить наш сервис рекомендаций, ответив на несколько вопросов
internal static let message = L10n.tr("Localizable", "Alert.Feedback.message")
/// Это займет 1 минуту
internal static let title = L10n.tr("Localizable", "Alert.Feedback.title")
internal enum Button {
/// Не в этот раз
internal static let cancel = L10n.tr("Localizable", "Alert.Feedback.Button.cancel")
/// Да, давайте
internal static let ok = L10n.tr("Localizable", "Alert.Feedback.Button.ok")
}
}
internal enum TurnOnNotifications {
/// Чтобы включить подписки разреши уведомления
internal static let message = L10n.tr("Localizable", "Alert.TurnOnNotifications.message")
/// Включить подписки!
internal static let title = L10n.tr("Localizable", "Alert.TurnOnNotifications.title")
internal enum Button {
/// Отмена
internal static let cancel = L10n.tr("Localizable", "Alert.TurnOnNotifications.Button.cancel")
/// Настройки
internal static let ok = L10n.tr("Localizable", "Alert.TurnOnNotifications.Button.ok")
}
}
}
internal enum FutureStoryPreview {
/// %ld
internal static func days(_ p1: Int) -> String {
return L10n.tr("Localizable", "FutureStoryPreview.days", p1)
}
}
internal enum FutureStoryPreviewCell {
internal enum SubscribeButton {
/// Напомнить
internal static let disabled = L10n.tr("Localizable", "FutureStoryPreviewCell.subscribeButton.disabled")
/// Не напоминать
internal static let enabled = L10n.tr("Localizable", "FutureStoryPreviewCell.subscribeButton.enabled")
/// Повторить
internal static let failure = L10n.tr("Localizable", "FutureStoryPreviewCell.subscribeButton.failure")
}
}
internal enum Notification {
/// Доступно новое путешествие!
internal static let title = L10n.tr("Localizable", "Notification.title")
}
internal enum SummaryContentStoryView {
internal enum BuyButton {
/// Купить билет!
internal static let title = L10n.tr("Localizable", "SummaryContentStoryView.BuyButton.title")
}
}
internal enum TurnOnNotificationsAlert {
/// Чтобы включить подписки разреши уведомления
internal static let message = L10n.tr("Localizable", "TurnOnNotificationsAlert.message")
/// Включить подписки!
internal static let title = L10n.tr("Localizable", "TurnOnNotificationsAlert.title")
internal enum Button {
/// Отмена
internal static let cancel = L10n.tr("Localizable", "TurnOnNotificationsAlert.Button.cancel")
/// Настройки
internal static let ok = L10n.tr("Localizable", "TurnOnNotificationsAlert.Button.ok")
}
}
}
// swiftlint:enable explicit_type_interface function_parameter_count identifier_name line_length
// swiftlint:enable nesting type_body_length type_name
// MARK: - Implementation Details
extension L10n {
private static func tr(_ table: String, _ key: String, _ args: CVarArg...) -> String {
// swiftlint:disable:next nslocalizedstring_key
let format = NSLocalizedString(key, tableName: table, bundle: Bundle(for: BundleToken.self), comment: "")
return String(format: format, locale: Locale.current, arguments: args)
}
}
private final class BundleToken {}
<file_sep>/Stories/Core/Domain/Transport.swift
//
// Created by onsissond.
//
enum Transport: String, Equatable, Decodable {
case avia, train, bus
}
<file_sep>/Stories/UI/StoryContentView/RegularContentStoryView/RegularContentStoryView.swift
//
// Created by onsissond.
//
import UIKit
import RxCocoa
import Kingfisher
final class RegularContentStoryView: UIView {
private lazy var _dismissButton: UIButton = .makeStoryDismiss()
private lazy var _imageView: UIImageView = {
$0.contentMode = .scaleAspectFill
return $0
}(UIImageView())
private lazy var _stackView: UIStackView = {
$0.axis = .vertical
$0.spacing = 16
return $0
}(UIStackView())
private lazy var _titleLabel: UILabel = {
$0.font = .preferredFont(forTextStyle: .title1)
$0.textColor = .white
$0.numberOfLines = 0
return $0
}(UILabel())
private lazy var _descriptionLabel: UILabel = {
$0.font = .preferredFont(forTextStyle: .headline)
$0.textColor = .white
$0.numberOfLines = 0
return $0
}(UILabel())
private lazy var _footerContentView = FooterContentView()
private lazy var _gradientLayer: CAGradientLayer = .storyGradient
init() {
super.init(frame: .zero)
_setupSubviews()
_setupLayout()
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
override func layoutSubviews() {
super.layoutSubviews()
_gradientLayer.frame = bounds
}
private func _setupLayout() {
_imageView.layer.insertSublayer(_gradientLayer, at: 0)
}
private func _setupSubviews() {
addSubview(_imageView)
_imageView.translatesAutoresizingMaskIntoConstraints = false
NSLayoutConstraint.activate([
_imageView.topAnchor.constraint(equalTo: topAnchor),
_imageView.leadingAnchor.constraint(equalTo: leadingAnchor),
_imageView.trailingAnchor.constraint(equalTo: trailingAnchor),
_imageView.bottomAnchor.constraint(equalTo: bottomAnchor)
])
addSubview(_dismissButton)
_dismissButton.translatesAutoresizingMaskIntoConstraints = false
if hasEyebrow {
_dismissButton.topAnchor.constraint(equalTo: topAnchor, constant: 80).isActive = true
} else {
_dismissButton.topAnchor.constraint(equalTo: topAnchor, constant: 62).isActive = true
}
_dismissButton.trailingAnchor.constraint(equalTo: trailingAnchor, constant: -16).isActive = true
addSubview(_stackView)
_stackView.translatesAutoresizingMaskIntoConstraints = false
NSLayoutConstraint.activate([
_stackView.leadingAnchor.constraint(equalTo: leadingAnchor, constant: 16),
_stackView.trailingAnchor.constraint(equalTo: trailingAnchor, constant: -16),
_stackView.bottomAnchor.constraint(equalTo: bottomAnchor, constant: -64)
])
_stackView.addArrangedSubview(_titleLabel)
_stackView.addArrangedSubview(_descriptionLabel)
_stackView.addArrangedSubview(_footerContentView)
}
}
extension RegularContentStoryView: StoryContentView {
func render(viewState: StoryContent) -> Bool {
guard case .regular(let viewState) = viewState else {
return false
}
_imageView.kf.setImage(
with: viewState.image.imageURL,
options: [
.transition(.fade(0.5))
]
)
_titleLabel.text = viewState.title
_descriptionLabel.text = viewState.description
_footerContentView.render(viewState: .init(
periodInfo: viewState.periodInfo,
priceInfo: viewState.priceInfo
))
_footerContentView.isHidden = viewState.periodInfo == nil &&
viewState.priceInfo == nil
return true
}
}
extension RegularContentStoryView {
enum Event {
case dismiss
}
var events: ControlEvent<Event> {
ControlEvent(
events: _dismissButton.rx.controlEvent(.touchUpInside)
.map { _ in .dismiss }
)
}
}
<file_sep>/StoriesApp/SceneDelegate.swift
//
// Created by onsissond.
//
import UIKit
import Stories
class SceneDelegate: UIResponder, UIWindowSceneDelegate {
var window: UIWindow?
func scene(
_ scene: UIScene,
willConnectTo session: UISceneSession,
options connectionOptions: UIScene.ConnectionOptions
) {
guard let scene = (scene as? UIWindowScene) else { return }
window = UIWindow(windowScene: scene)
window?.rootViewController = StoriesFactory.create(dependency: .init(
httpClient: .mock,
dateProvider: { .mock("2021-03-28", dateFormat: "yyyy-MM-dd")! },
openURL: { UIApplication.shared.open($0) }
), payload: Void())
window?.makeKeyAndVisible()
}
}
<file_sep>/Stories/UI/StoriesViewController/CubeAnimator/CubeAttributesAnimator.swift
//
// Created by onsissond.
//
import UIKit
/// An animator that applies a cube transition effect when you scroll.
struct CubeAttributesAnimator: LayoutAttributesAnimator {
/// The perspective that will be applied to the cells. Must be negative. -1/500 by default.
/// Recommended range [-1/2000, -1/200].
var perspective: CGFloat
/// The higher the angle is, the _steeper_ the cell would be when transforming.
var totalAngle: CGFloat
init(perspective: CGFloat = -1 / 500, totalAngle: CGFloat = .pi / 2) {
self.perspective = perspective
self.totalAngle = totalAngle
}
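// Example (illustrative): a flatter, slower-turning cube than the default,
// staying inside the recommended perspective range.
//
//     let animator = CubeAttributesAnimator(perspective: -1 / 1000, totalAngle: .pi / 3)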
func animate(collectionView: UICollectionView, attributes: AnimatedCollectionViewLayoutAttributes) {
let position = attributes.middleOffset
guard let contentView = attributes.contentView else { return }
if abs(position) >= 1 {
contentView.layer.transform = CATransform3DIdentity
contentView.layer.anchorPoint = CGPoint(x: 0.5, y: 0.5)
} else if attributes.scrollDirection == .horizontal {
let rotateAngle = totalAngle * position
let anchorPoint = CGPoint(x: position > 0 ? 0 : 1, y: 0.5)
// As soon as we changed anchor point, we'll need to either update frame/position
// or transform to offset the position change. frame doesn't work for iOS 14 any
// more so we'll use transform.
let anchorPointOffsetValue = contentView.layer.bounds.width / 2
let anchorPointOffset = position > 0 ? -anchorPointOffsetValue : anchorPointOffsetValue
var transform = CATransform3DMakeTranslation(anchorPointOffset, 0, 0)
contentView.layer.anchorPoint = anchorPoint
if contentView.translatesAutoresizingMaskIntoConstraints == true {
// not use transformX/Y
transform = CATransform3DMakeTranslation(0, 0, 0)
// reset origin
var frame = attributes.frame
frame.origin = .zero
contentView.frame = frame
}
transform.m34 = perspective
transform = CATransform3DRotate(transform, rotateAngle, 0, 1, 0)
contentView.layer.transform = transform
} else {
let rotateAngle = totalAngle * position
let anchorPoint = CGPoint(x: 0.5, y: position > 0 ? 0 : 1)
// As soon as we changed anchor point, we'll need to either update frame/position
// or transform to offset the position change. frame doesn't work for iOS 14 any
// more so we'll use transform.
let anchorPointOffsetValue = contentView.layer.bounds.height / 2
let anchorPointOffset = position > 0 ? -anchorPointOffsetValue : anchorPointOffsetValue
var transform = CATransform3DMakeTranslation(0, anchorPointOffset, 0)
transform.m34 = perspective
transform = CATransform3DRotate(transform, rotateAngle, -1, 0, 0)
contentView.layer.transform = transform
contentView.layer.anchorPoint = anchorPoint
}
collectionView.isUserInteractionEnabled = false
DispatchQueue.main.asyncAfter(deadline: .now() + .milliseconds(300)) {
collectionView.isUserInteractionEnabled = true
}
}
}
<file_sep>/Stories/Core/API/FetchStories.swift
//
// Created by onsissond.
//
import RxSwift
import Moya
import RxMoya
extension HTTPClient {
var fetchStories: Single<[Story]> {
rx.request(MultiTarget(FetchStoriesRequest()))
.subscribeOn(SerialDispatchQueueScheduler(qos: .utility))
.map([Story].self)
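// Note: transport and decoding failures are swallowed below and surfaced as an empty story list.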
.catchErrorJustReturn([])
.observeOn(MainScheduler.instance)
}
}
<file_sep>/Stories/UI/StoriesViewController/InstaProgressView/InstaProgressViewDelegate.swift
//
// InstaProgressViewDelegate.swift
// InstaProgressView
//
// Created by <NAME> on 11/19/20.
//
import Foundation
protocol InstaProgressViewDelegate: AnyObject {
func next()
func back()
}
<file_sep>/Stories/UI/StoryContentView/StoryGradient.swift
//
// Created by onsissond.
//
import UIKit
extension CAGradientLayer {
static var storyGradient: CAGradientLayer {
let layer = CAGradientLayer()
layer.startPoint = CGPoint(x: 1, y: 0)
layer.endPoint = CGPoint(x: 1, y: 1)
layer.colors = [
UIColor.clear.cgColor,
UIColor(hex: 0xF1D2F).withAlphaComponent(0.53).cgColor,
UIColor(hex: 0x11834).cgColor
]
layer.locations = [0, 0.5, 1]
return layer
}
}
<file_sep>/Stories/UI/StoryContentView/SummaryContentStoryView/SummaryFooterStoryView.swift
//
// Created by onsissond.
//
import UIKit
final class SummaryFooterStoryView: UIView {
private lazy var _priceLabel: UILabel = {
$0.setContentHuggingPriority(.required, for: .horizontal)
$0.font = .preferredFont(forTextStyle: .title2)
$0.textColor = .white
return $0
}(UILabel())
private lazy var _titleLabel: UILabel = {
$0.font = .preferredFont(forTextStyle: .headline)
$0.textColor = .white
$0.setContentHuggingPriority(.required, for: .vertical)
return $0
}(UILabel())
private lazy var _subtitleLabel: UILabel = {
$0.font = .preferredFont(forTextStyle: .caption2)
$0.textColor = .lightGray
$0.setContentHuggingPriority(.required, for: .vertical)
return $0
}(UILabel())
init() {
super.init(frame: .zero)
_setupSubviews()
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
private func _setupSubviews() {
addSubview(_titleLabel)
_titleLabel.translatesAutoresizingMaskIntoConstraints = false
NSLayoutConstraint.activate([
_titleLabel.topAnchor.constraint(equalTo: topAnchor),
_titleLabel.leadingAnchor.constraint(equalTo: leadingAnchor)
])
addSubview(_subtitleLabel)
_subtitleLabel.translatesAutoresizingMaskIntoConstraints = false
NSLayoutConstraint.activate([
_subtitleLabel.topAnchor.constraint(equalTo: _titleLabel.bottomAnchor, constant: 4),
_subtitleLabel.leadingAnchor.constraint(equalTo: leadingAnchor),
_subtitleLabel.bottomAnchor.constraint(equalTo: bottomAnchor)
])
addSubview(_priceLabel)
_priceLabel.translatesAutoresizingMaskIntoConstraints = false
NSLayoutConstraint.activate([
_priceLabel.topAnchor.constraint(equalTo: _titleLabel.topAnchor),
_priceLabel.leadingAnchor.constraint(equalTo: _titleLabel.trailingAnchor, constant: 8),
_priceLabel.trailingAnchor.constraint(equalTo: trailingAnchor),
_priceLabel.bottomAnchor.constraint(greaterThanOrEqualTo: bottomAnchor)
])
}
}
extension SummaryFooterStoryView {
struct ViewState {
var title: String
var subtitle: String
var price: String
}
func render(viewState: ViewState) {
_priceLabel.text = viewState.price
_titleLabel.text = viewState.title
_subtitleLabel.text = viewState.subtitle
}
}
<file_sep>/Stories/UI/StoryContentView/SummaryContentStoryView/SummaryHeaderStoryView.swift
//
// Created by onsissond.
//
import UIKit
final class SummaryHeaderStoryView: UIView {
private lazy var _periodLabel: UILabel = {
$0.font = .preferredFont(forTextStyle: .headline)
$0.textColor = .white
$0.setContentHuggingPriority(.required, for: .vertical)
return $0
}(UILabel())
private lazy var _titleLabel: UILabel = {
$0.font = .preferredFont(forTextStyle: .title1)
$0.textColor = .white
$0.numberOfLines = 0
$0.setContentHuggingPriority(.required, for: .vertical)
return $0
}(UILabel())
init() {
super.init(frame: .zero)
_setupSubviews()
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
private func _setupSubviews() {
addSubview(_periodLabel)
_periodLabel.translatesAutoresizingMaskIntoConstraints = false
NSLayoutConstraint.activate([
_periodLabel.topAnchor.constraint(equalTo: topAnchor),
_periodLabel.leadingAnchor.constraint(equalTo: leadingAnchor),
_periodLabel.trailingAnchor.constraint(equalTo: trailingAnchor)
])
addSubview(_titleLabel)
_titleLabel.translatesAutoresizingMaskIntoConstraints = false
NSLayoutConstraint.activate([
_titleLabel.topAnchor.constraint(equalTo: _periodLabel.bottomAnchor, constant: 4),
_titleLabel.leadingAnchor.constraint(equalTo: leadingAnchor),
_titleLabel.trailingAnchor.constraint(equalTo: trailingAnchor),
_titleLabel.bottomAnchor.constraint(equalTo: bottomAnchor)
])
}
}
extension SummaryHeaderStoryView {
struct ViewState {
var period: String
var title: String
}
func render(viewState: ViewState) {
_periodLabel.text = viewState.period
_titleLabel.text = viewState.title
}
}
<file_sep>/Stories/Core/Domain/StoryPreview.swift
//
// Created by onsissond.
//
import UIKit
struct RegularStoryPreview: Equatable {
var title: String
var description: String?
var imageURL: URL
}
extension RegularStoryPreview: Decodable {
enum CodingKeys: String, CodingKey {
case title
case description = "subtitle"
case image = "thumb"
}
init(from decoder: Decoder) throws {
let container = try decoder.container(keyedBy: CodingKeys.self)
title = try container.decode(String.self, forKey: .title)
description = try container.decodeIfPresent(String.self, forKey: .description)
imageURL = try container.decode(URL.self, forKey: .image)
}
}
<file_sep>/Stories/UI/StoriesViewController/CubeAnimator/LayoutAttributesAnimator.swift
//
// Created by onsissond.
//
import UIKit
protocol LayoutAttributesAnimator {
func animate(collectionView: UICollectionView, attributes: AnimatedCollectionViewLayoutAttributes)
}
class AnimatedCollectionViewLayoutAttributes: UICollectionViewLayoutAttributes {
var contentView: UIView?
var scrollDirection: UICollectionView.ScrollDirection = .vertical
/// The ratio of the distance between the start of the cell and the start of the collectionView and the height/width of the cell depending on the scrollDirection. It's 0 when the start of the cell aligns the start of the collectionView. It gets positive when the cell moves towards the scrolling direction (right/down) while getting negative when moves opposite.
var startOffset: CGFloat = 0
/// The ratio of the distance between the center of the cell and the center of the collectionView and the height/width of the cell depending on the scrollDirection. It's 0 when the center of the cell aligns the center of the collectionView. It gets positive when the cell moves towards the scrolling direction (right/down) while getting negative when moves opposite.
var middleOffset: CGFloat = 0
/// The ratio of the distance between the **start** of the cell and the end of the collectionView and the height/width of the cell depending on the scrollDirection. It's 0 when the **start** of the cell aligns the end of the collectionView. It gets positive when the cell moves towards the scrolling direction (right/down) while getting negative when moves opposite.
var endOffset: CGFloat = 0
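// Example: middleOffset is 0 when the cell is centered in the collection view and ±1 when
// its center is exactly one cell width (horizontal) or height (vertical) away from that center.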
override func copy(with zone: NSZone? = nil) -> Any {
let copy = super.copy(with: zone) as! AnimatedCollectionViewLayoutAttributes
copy.contentView = contentView
copy.scrollDirection = scrollDirection
copy.startOffset = startOffset
copy.middleOffset = middleOffset
copy.endOffset = endOffset
return copy
}
override func isEqual(_ object: Any?) -> Bool {
guard let o = object as? AnimatedCollectionViewLayoutAttributes else { return false }
return super.isEqual(o)
&& o.contentView == contentView
&& o.scrollDirection == scrollDirection
&& o.startOffset == startOffset
&& o.middleOffset == middleOffset
&& o.endOffset == endOffset
}
}
<file_sep>/Stories/UI/StoryContentView/UIButton+StoryDismiss.swift
//
// Created by onsissond.
//
import UIKit
extension UIButton {
static func makeStoryDismiss() -> UIButton {
let button = UIButton()
button.setImage(Asset.Icon.dismiss.image, for: .normal)
return button
}
}
<file_sep>/Stories/RxOptional/Observable+FilterNil.swift
//
// Created by onsissond.
//
import RxSwift
public extension ObservableType where Element: OptionalType {
/**
Unwraps and filters out `nil` elements.
- returns: `Observable` of source `Observable`'s elements, with `nil` elements filtered out.
*/
func filterNil() -> Observable<Element.Wrapped> {
return flatMap { element -> Observable<Element.Wrapped> in
guard let value = element.value else {
return Observable<Element.Wrapped>.empty()
}
return Observable<Element.Wrapped>.just(value)
}
}
}
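// Usage sketch (illustrative only):
//
//     let names: Observable<String?> = .from(["Alice", nil, "Bob"])
//     _ = names.filterNil()                  // Observable<String>
//         .subscribe(onNext: { print($0) })  // prints "Alice", then "Bob"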
public protocol OptionalType {
associatedtype Wrapped
var value: Wrapped? { get }
}
extension Optional: OptionalType {
/// Cast `Optional<Wrapped>` to `Wrapped?`
public var value: Wrapped? {
return self
}
}
<file_sep>/Stories/UI/StoriesViewController/StoryCell.swift
//
// Created by onsissond.
//
import UIKit
import ComposableArchitecture
import RxSwift
enum StorySystem {
typealias LocalStore = Store<State, Action>
typealias LocalViewStore = ViewStore<State, Action>
struct State: Equatable {
var story: Story
var progressState: InstaProgressView.ViewState
var currentPage: Int {
get { progressState.currentPage }
set { progressState.currentPage = newValue }
}
init(story: Story) {
self.story = story
progressState = InstaProgressView.ViewState(
pagesCount: story.content.count,
progress: story.content.map { _ in .initial },
currentPage: 0
)
}
}
enum Action: Equatable {
case nextStory
case previousStory
case nextPage
case previousPage
case finish
case run
case nullify
case pause
case `continue`
case dismiss
case openDeepLink(URL)
case requestFeedback
}
struct Environment {
var openURL: (URL) -> Void
}
}
extension StorySystem {
static var reducer = Reducer<State, Action, Environment> { state, action, env in
switch action {
case .nextPage:
if state.currentPage == state.story.content.count - 1 {
return Effect(value: .nextStory)
} else if state.currentPage < state.story.content.count - 1 {
state.progressState.progress[state.currentPage] = .finish
state.currentPage += 1
state.progressState.progress[state.currentPage] = .start(UUID())
}
case .previousPage:
if state.currentPage == 0 {
return Effect(value: .previousStory)
} else if state.currentPage > 0 {
state.progressState.progress[state.currentPage] = .initial
state.currentPage -= 1
state.progressState.progress[state.currentPage] = .start(UUID())
}
case .run:
for index in (0..<state.progressState.pagesCount) {
if index < state.currentPage {
state.progressState.progress[index] = .finish
} else if index == state.currentPage {
state.progressState.progress[index] = .start(UUID())
} else {
state.progressState.progress[index] = .initial
}
}
case .nullify:
state.progressState.progress = state.progressState.progress
.map { _ in .initial }
case .finish:
state.progressState.progress[state.currentPage] = .finish
case .pause, .requestFeedback:
state.progressState.progress[state.currentPage] = .pause
case .continue:
state.progressState.progress[state.currentPage] = .continue
case let .openDeepLink(url):
env.openURL(url)
case .nextStory, .previousStory, .dismiss:
break
}
return .none
}
}
protocol StoryContentView: UIView {
func render(viewState: StoryContent) -> Bool
}
class StoryCell: UICollectionViewCell {
private lazy var _storyContentView = SwitchableView(contentViews: [
_createRegularContentStoryView(),
_createSummaryContentStoryView()
])
private lazy var _progressView = InstaProgressView(
progressTintColor: .white,
trackTintColor: UIColor.white.withAlphaComponent(0.5),
spaceBetweenSegments: 8,
duration: 10
)
private lazy var _reusableDisposeBag = DisposeBag()
private lazy var _disposeBag = DisposeBag()
private var _viewStore: StorySystem.LocalViewStore?
private lazy var _dataSource = PublishSubject<InstaProgressView.ViewState>()
override init(frame: CGRect) {
super.init(frame: frame)
_setupSubviews()
_setupAppearence()
_setupGesturesRecognizers()
_setupSubscriptions()
_progressView.delegate = self
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
private func _setupAppearence() {
contentView.clipsToBounds = true
}
private func _setupSubscriptions() {
_dataSource
.distinctUntilChanged()
.bind(onNext: { [weak self] in
self?._progressView.render(
viewState: $0
)
})
.disposed(by: _disposeBag)
}
private func _setupSubviews() {
contentView.addSubview(_storyContentView)
_storyContentView.translatesAutoresizingMaskIntoConstraints = false
NSLayoutConstraint.activate([
_storyContentView.topAnchor.constraint(equalTo: topAnchor),
_storyContentView.leadingAnchor.constraint(equalTo: leadingAnchor),
_storyContentView.trailingAnchor.constraint(equalTo: trailingAnchor),
_storyContentView.bottomAnchor.constraint(equalTo: bottomAnchor)
])
contentView.addSubview(_progressView)
if hasEyebrow {
_progressView.topAnchor.constraint(equalTo: topAnchor, constant: 48).isActive = true
} else {
_progressView.topAnchor.constraint(equalTo: topAnchor, constant: 24).isActive = true
}
_progressView.leadingAnchor.constraint(equalTo: leadingAnchor, constant: 8).isActive = true
_progressView.trailingAnchor.constraint(equalTo: trailingAnchor, constant: -8).isActive = true
_progressView.heightAnchor.constraint(equalToConstant: 2).isActive = true
_progressView.translatesAutoresizingMaskIntoConstraints = false
}
private func _createRegularContentStoryView() -> StoryContentView {
let view = RegularContentStoryView()
view.events.bind(onNext: { [weak self] in
switch $0 {
case .dismiss:
self?._viewStore?.send(.dismiss)
}
}).disposed(by: _disposeBag)
return view
}
private func _createSummaryContentStoryView() -> StoryContentView {
let view = SummaryContentStoryView()
view.events.bind(onNext: { [weak self] in
switch $0 {
case .dismiss:
self?._viewStore?.send(.dismiss)
case let .openDeeplink(url):
self?._viewStore?.send(.openDeepLink(url))
case .requestFeedback:
self?._viewStore?.send(.requestFeedback)
}
}).disposed(by: _disposeBag)
return view
}
private func _setupGesturesRecognizers() {
let tapRecognizer = UITapGestureRecognizer(target: self, action: #selector(handleGesture))
addGestureRecognizer(tapRecognizer)
}
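// A tap on the left half of the cell goes back one page; a tap on the right half advances.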
@objc private func handleGesture(gesture: UITapGestureRecognizer) {
let location = gesture.location(in: self)
if location.x < layer.frame.size.width / 2 {
_viewStore?.send(.previousPage)
} else {
_viewStore?.send(.nextPage)
}
}
}
extension StoryCell {
func render(store: StorySystem.LocalStore) {
_viewStore = ViewStore(store)
_reusableDisposeBag = DisposeBag()
_viewStore?.publisher.subscribe(onNext: { [weak self] in
_ = self?._storyContentView.render(
viewState: $0.story.content[$0.currentPage]
)
}).disposed(by: _reusableDisposeBag)
_viewStore?.publisher.map(\.progressState)
.subscribe(onNext: _dataSource.onNext)
.disposed(by: _reusableDisposeBag)
}
}
extension StoryCell: InstaProgressViewDelegate {
func next() {
_viewStore?.send(.nextPage)
}
func back() {
_viewStore?.send(.previousStory)
}
}
extension UIView {
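/// Heuristic for notched ("eyebrow") devices: iPhone X and later portrait screens are at least 812 pt tall.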
var hasEyebrow: Bool {
UIScreen.main.bounds.height >= 812
}
}
<file_sep>/README.md
# Stories
An MVP of a stories feature for iOS, packaged as a standalone module with a demo app.
<p float="left">
<img src="/Docs/Images/MainScreen.png" width="200" />
<img src="/Docs/Images/RegularStory.png" width="200" />
<img src="/Docs/Images/SummeryStory.png" width="200" />
</p>
## Architecture

[Composable Architecture](https://github.com/pointfreeco/swift-composable-architecture)
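
## Usage

A minimal integration sketch, based on the demo app's `SceneDelegate`. The `.mock` HTTP client and the fixed mock date only exist to make the bundled sample stories appear active; a production host would inject a live client and `{ Date() }`.

```swift
import UIKit
import Stories

// Present the stories preview as the window's root view controller.
let window = UIWindow(frame: UIScreen.main.bounds)
window.rootViewController = StoriesFactory.create(
    dependency: .init(
        httpClient: .mock,
        dateProvider: { .mock("2021-03-28", dateFormat: "yyyy-MM-dd")! },
        openURL: { UIApplication.shared.open($0) }
    ),
    payload: Void()
)
window.makeKeyAndVisible()
```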
<file_sep>/Stories/Core/Services/NotificationService/NotificationService.swift
//
// Created by onsissond.
//
struct NotificationService {
var notificationProvider: NotificationProvider
var notificationSettingsStorage: NotificationSettingsStorage
}
extension NotificationService {
static var live = NotificationService(
notificationProvider: .live,
notificationSettingsStorage: .live
)
}
<file_sep>/Stories/UI/StoriesViewController/StoriesSystem.swift
//
// Created by onsissond.
//
import UIKit
import RxSwift
import ComposableArchitecture
enum StoriesSystem {
typealias LocalStore = Store<State, Action>
struct State: Equatable {
var stories: [StorySystem.State] = []
var currentStory = 0
var dismiss = false
var feedbackAlert: AlertState<Action>?
var feedbackURL: URL?
}
enum Action: Equatable {
case storyAction(storyIndex: Int, action: StorySystem.Action)
case launchFeedback
case launchedFeedback
case dismissFeedbackAlert
}
struct Environment {
var storyEnvironment: StorySystem.Environment
}
}
extension StoriesSystem.State {
init(stories: [Story], currentStory: Int) {
self.stories = stories.map(StorySystem.State.init)
self.currentStory = currentStory
}
}
extension StoriesSystem {
static var reducer = Reducer<State, Action, Environment> { state, action, _ in
switch action {
case .launchFeedback:
state.feedbackAlert = nil
state.feedbackURL = URL(string: "https://www.google.com")!
return .init(value: .storyAction(
storyIndex: state.currentStory,
action: .pause
))
case .launchedFeedback:
state.feedbackURL = nil
case .dismissFeedbackAlert:
state.feedbackAlert = nil
return .init(value: .storyAction(
storyIndex: state.currentStory,
action: .continue
))
case let .storyAction(index, .previousStory):
if index - 1 >= 0 {
state.currentStory = index - 1
return .concatenate(
Effect(value: .storyAction(storyIndex: index, action: .nullify)),
Effect(value: .storyAction(storyIndex: state.currentStory, action: .run))
)
} else {
return Effect(value: .storyAction(storyIndex: state.currentStory, action: .run))
}
case let .storyAction(index, .nextStory):
if index + 1 < state.stories.count {
state.currentStory = index + 1
return .concatenate(
Effect(value: .storyAction(storyIndex: index, action: .finish)),
Effect(value: .storyAction(storyIndex: state.currentStory, action: .run))
)
} else {
return Effect(value: .storyAction(storyIndex: index, action: .finish))
}
case .storyAction(_, .requestFeedback):
state.feedbackAlert = .feedback
case .storyAction:
return .none
}
return .none
}
.combined(
with: StorySystem.reducer.forEach(
state: \.stories,
action: /StoriesSystem.Action.storyAction,
environment: \.storyEnvironment
)
)
}
<file_sep>/Stories/UI/StoriesPreviewViewController/StoriesPreviewSystem.swift
//
// Created by onsissond.
//
import UIKit
import RxSwift
import ComposableArchitecture
enum StoriesPreviewSystem {
typealias LocalStore = ComposableArchitecture.Store<State, Action>
enum SubscriptionState: Equatable {
case off
case on
case failure(UUID, needShowAlert: Bool)
}
enum SetupNotificationMode {
case manual
case automatic
}
struct State: Equatable {
var storiesState: StoriesSystem.State?
var stories: [Story] = []
var futureStories: [Story] = []
var futureStory: FutureStory?
var subscriptionState: SubscriptionState = .off
}
enum Action: Equatable {
case viewDidLoad
case loadedStories(Stories)
case setupFutureStories([Story])
case switchNotifications
case openStories(index: Int)
case openSettings
case dismissStories
case setSubscriptionState(SubscriptionState)
case setupNotification(SetupNotificationMode)
case storiesAction(StoriesSystem.Action)
}
struct Environment {
var fetchStories: () -> Effect<Stories>
var currentDate: () -> Date
var uuid: () -> UUID
var calendar: () -> Calendar
var notificationService: NotificationService
var storiesEnvironment: StoriesSystem.Environment
}
}
extension StoriesPreviewSystem.State {
enum Item {
case story(Story)
case future(FutureStory)
}
var dataSource: [Item] {
stories.map(Item.story) +
(futureStory.map(Item.future).map { [$0] } ?? [])
}
}
extension StoriesPreviewSystem {
static var reducer = ComposableArchitecture.Reducer<State, Action, Environment> { state, action, env in
switch action {
case .viewDidLoad:
state.subscriptionState = env.notificationService
.notificationSettingsStorage
.loadSubscriptionStatus()
.map { .init($0, uuidProvider: env.uuid) }
?? .off
return env.fetchStories().map(Action.loadedStories)
case .loadedStories(let stories):
state.stories = stories.activeStories
return .init(value: .setupFutureStories(stories.futureStories))
case .openStories(let index):
state.storiesState = .init(
stories: state.stories,
currentStory: index
)
return .none
case .setupFutureStories(let stories):
state.futureStories = stories
state.futureStory = state.futureStories.first.map {
.init(
story: $0,
calendar: env.calendar(),
currentDate: env.currentDate()
)
}
switch state.subscriptionState {
case .on: return .init(value: .setupNotification(.automatic))
case .off, .failure: return .none
}
case .openSettings:
env.storiesEnvironment.storyEnvironment.openURL(
URL(string: UIApplication.openSettingsURLString)!
)
return .none
case .switchNotifications:
switch state.subscriptionState {
case .on:
state.subscriptionState = .off
env.notificationService.notificationSettingsStorage
.saveSubscriptionStatus(.off)
env.notificationService.notificationProvider
.removeStoryNotifications()
case .off, .failure:
return .init(value: .setupNotification(.manual))
}
return .none
case .setupNotification(let mode):
env.notificationService.notificationProvider.removeStoryNotifications()
return env.notificationService.notificationProvider.setupNotification(
state.futureStories
)
.map { result -> StoriesPreviewSystem.SubscriptionState? in
.init(result, mode: mode, uuidProvider: env.uuid)
}
.asObservable()
.filterNil()
.map(Action.setSubscriptionState)
.eraseToEffect()
case .setSubscriptionState(let value):
state.subscriptionState = value
env.notificationService.notificationSettingsStorage
.saveSubscriptionStatus(.init(state: value))
return .none
case .dismissStories:
state.storiesState = nil
return .none
case .storiesAction(.storyAction(_, .dismiss)):
return Observable.just(())
.map { .dismissStories }
.eraseToEffect()
case .storiesAction:
return .none
}
}
.combined(with: StoriesSystem.reducer.optional().pullback(
state: \.storiesState,
action: /Action.storiesAction,
environment: \.storiesEnvironment
))
}
private extension StoriesPreviewSystem.SubscriptionState {
init?(
_ result: NotificationProvider.SetupNotificationResult,
mode: StoriesPreviewSystem.SetupNotificationMode,
uuidProvider: () -> UUID
) {
switch result {
case .success:
self = .on
case .failure:
guard mode == .manual else {
return nil
}
self = .failure(uuidProvider(), needShowAlert: true)
}
}
}
private extension NotificationSettingsStorage.StoriesSubscriptionStatus {
init(state: StoriesPreviewSystem.SubscriptionState) {
switch state {
case .off: self = .off
case .on: self = .on
case .failure: self = .failure
}
}
}
private extension StoriesPreviewSystem.SubscriptionState {
init(
_ storiesSubscriptionStatus: NotificationSettingsStorage.StoriesSubscriptionStatus,
uuidProvider: () -> UUID
) {
switch storiesSubscriptionStatus {
case .on:
self = .on
case .off:
self = .off
case .failure:
self = .failure(uuidProvider(), needShowAlert: false)
}
}
}
private extension FutureStory {
init(story: Story, calendar: Calendar, currentDate: Date) {
self.init(
imageURL: story.preview.imageURL,
daysToFutureStory: {
calendar.dateComponents(
[.day],
from: currentDate,
to: story.publishDate
).day ?? 0
}()
)
}
}
<file_sep>/Stories/UI/StoryContentView/RegularContentStoryView/FooterStoryView.swift
//
// Created by onsissond.
//
import UIKit
final class FooterStoryView: UIView {
private lazy var _titleLabel: UILabel = {
$0.font = .preferredFont(forTextStyle: .subheadline)
$0.textColor = .white
return $0
}(UILabel())
private lazy var _subtitleLabel: UILabel = {
$0.font = .preferredFont(forTextStyle: .title3)
$0.textColor = .white
return $0
}(UILabel())
init() {
super.init(frame: .zero)
_setupSubviews()
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
private func _setupSubviews() {
addSubview(_titleLabel)
_titleLabel.translatesAutoresizingMaskIntoConstraints = false
NSLayoutConstraint.activate([
_titleLabel.topAnchor.constraint(equalTo: topAnchor),
_titleLabel.leadingAnchor.constraint(equalTo: leadingAnchor),
_titleLabel.trailingAnchor.constraint(equalTo: trailingAnchor)
])
addSubview(_subtitleLabel)
_subtitleLabel.translatesAutoresizingMaskIntoConstraints = false
NSLayoutConstraint.activate([
_subtitleLabel.topAnchor.constraint(equalTo: _titleLabel.bottomAnchor, constant: 4),
_subtitleLabel.leadingAnchor.constraint(equalTo: leadingAnchor),
_subtitleLabel.trailingAnchor.constraint(equalTo: trailingAnchor),
_subtitleLabel.bottomAnchor.constraint(equalTo: bottomAnchor)
])
}
}
extension FooterStoryView {
struct ViewState {
let title: String
let subtitle: String
}
func render(viewState: ViewState) {
_titleLabel.text = viewState.title
_subtitleLabel.text = viewState.subtitle
}
}
class FooterContentView: UIView {
private lazy var _stackView: UIStackView = {
$0.axis = .horizontal
$0.alignment = .fill
$0.distribution = .fillEqually
$0.spacing = 8
return $0
}(UIStackView())
private lazy var _dateView = FooterStoryView()
private lazy var _priceView = FooterStoryView()
init() {
super.init(frame: .zero)
_setupSubviews()
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
private func _setupSubviews() {
addSubview(_stackView)
_stackView.translatesAutoresizingMaskIntoConstraints = false
NSLayoutConstraint.activate([
_stackView.topAnchor.constraint(equalTo: topAnchor),
_stackView.leadingAnchor.constraint(equalTo: leadingAnchor),
_stackView.trailingAnchor.constraint(equalTo: trailingAnchor),
_stackView.bottomAnchor.constraint(equalTo: bottomAnchor)
])
_stackView.addArrangedSubview(_priceView)
_stackView.addArrangedSubview(_dateView)
}
}
extension FooterContentView {
struct ViewState {
let periodInfo: RegularStoryContent.Info?
let priceInfo: RegularStoryContent.Info?
}
func render(viewState: ViewState) {
if let periodInfo = viewState.periodInfo {
_dateView.render(viewState: .init(
title: periodInfo.title,
subtitle: periodInfo.subtitle
))
}
_dateView.isHidden = viewState.periodInfo == nil
if let priceInfo = viewState.priceInfo {
_priceView.render(viewState: .init(
title: priceInfo.title,
subtitle: priceInfo.subtitle
))
}
_priceView.isHidden = viewState.priceInfo == nil
}
}
<file_sep>/Stories/UI/StoriesPreviewViewController/UI/FutureStoryPreviewCell.swift
//
// Created by onsissond.
//
import UIKit
import ComposableArchitecture
import RxSwift
import Kingfisher
final class FutureStoryPreviewCell: UICollectionViewCell {
private lazy var _contentView: UIView = {
$0.backgroundColor = UIColor(hex: 0xAFD4FF)
$0.layer.cornerRadius = 10
$0.layer.masksToBounds = true
return $0
}(UIView())
private lazy var _imageView: UIImageView = {
$0.contentMode = .scaleAspectFill
$0.layer.cornerRadius = 10
$0.layer.masksToBounds = true
return $0
}(UIImageView())
private lazy var _blurEffectView = UIVisualEffectView(
effect: UIBlurEffect(style: .dark)
)
private lazy var _titleLabel: UILabel = {
$0.textColor = .white
$0.numberOfLines = 0
$0.textAlignment = .center
$0.font = .preferredFont(forTextStyle: .caption1)
return $0
}(UILabel())
private lazy var _subscribeLabel: UILabel = {
$0.textColor = .white
$0.textAlignment = .center
$0.font = .preferredFont(forTextStyle: .caption1)
$0.backgroundColor = .black
$0.layer.cornerRadius = 4
$0.layer.masksToBounds = true
$0.setContentCompressionResistancePriority(.required, for: .vertical)
return $0
}(UILabel())
override init(frame: CGRect) {
super.init(frame: frame)
_setupSubviews()
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
private func _setupSubviews() {
contentView.addSubview(_contentView)
_contentView.translatesAutoresizingMaskIntoConstraints = false
NSLayoutConstraint.activate([
_contentView.topAnchor.constraint(equalTo: contentView.topAnchor),
_contentView.bottomAnchor.constraint(equalTo: contentView.bottomAnchor),
_contentView.leftAnchor.constraint(equalTo: contentView.leftAnchor),
_contentView.rightAnchor.constraint(equalTo: contentView.rightAnchor)
])
_contentView.addSubview(_imageView)
_imageView.translatesAutoresizingMaskIntoConstraints = false
NSLayoutConstraint.activate([
_imageView.topAnchor.constraint(equalTo: _contentView.topAnchor, constant: 2),
_imageView.leadingAnchor.constraint(equalTo: _contentView.leadingAnchor, constant: 2),
_imageView.trailingAnchor.constraint(equalTo: _contentView.trailingAnchor, constant: -2),
_imageView.bottomAnchor.constraint(equalTo: _contentView.bottomAnchor, constant: -2)
])
_imageView.addSubview(_blurEffectView)
_blurEffectView.translatesAutoresizingMaskIntoConstraints = false
NSLayoutConstraint.activate([
_blurEffectView.topAnchor.constraint(equalTo: _imageView.topAnchor),
_blurEffectView.leadingAnchor.constraint(equalTo: _imageView.leadingAnchor),
_blurEffectView.trailingAnchor.constraint(equalTo: _imageView.trailingAnchor),
_blurEffectView.bottomAnchor.constraint(equalTo: _imageView.bottomAnchor)
])
_contentView.addSubview(_titleLabel)
_titleLabel.translatesAutoresizingMaskIntoConstraints = false
NSLayoutConstraint.activate([
_titleLabel.leadingAnchor.constraint(equalTo: _contentView.leadingAnchor, constant: 8),
_titleLabel.trailingAnchor.constraint(equalTo: _contentView.trailingAnchor, constant: -8)
])
_contentView.addSubview(_subscribeLabel)
_subscribeLabel.translatesAutoresizingMaskIntoConstraints = false
NSLayoutConstraint.activate([
_subscribeLabel.heightAnchor.constraint(equalToConstant: 28),
_subscribeLabel.topAnchor.constraint(equalTo: _titleLabel.bottomAnchor, constant: 12),
_subscribeLabel.leadingAnchor.constraint(equalTo: _contentView.leadingAnchor, constant: 8),
_subscribeLabel.trailingAnchor.constraint(equalTo: _contentView.trailingAnchor, constant: -8),
_subscribeLabel.bottomAnchor.constraint(equalTo: _contentView.bottomAnchor, constant: -16)
])
}
}
extension FutureStoryPreviewCell {
struct ViewState {
enum SubscriptionState {
case off
case on
case failure
}
let imageURL: URL
let subscriptionState: SubscriptionState
let daysToFutureStory: Int
}
func render(viewState: ViewState) {
_imageView.kf.setImage(
with: viewState.imageURL,
options: [.transition(.fade(0.5))]
)
_titleLabel.attributedText = NSMutableAttributedString(
numberOfDays: viewState.daysToFutureStory
)
_titleLabel.textAlignment = .center
switch viewState.subscriptionState {
case .off:
_subscribeLabel.text = L10n.FutureStoryPreviewCell.SubscribeButton.disabled
case .on:
_subscribeLabel.text = L10n.FutureStoryPreviewCell.SubscribeButton.enabled
case .failure:
_subscribeLabel.text = L10n.FutureStoryPreviewCell.SubscribeButton.failure
}
}
}
private extension NSMutableAttributedString {
convenience init(numberOfDays: Int) {
self.init(
string: L10n.FutureStoryPreview.days(numberOfDays)
)
let paragraphStyle = NSMutableParagraphStyle()
paragraphStyle.lineSpacing = 5
addAttribute(
.paragraphStyle,
value: paragraphStyle,
range: NSRange(location: 0, length: length)
)
}
}
<file_sep>/Stories/Foundation/HTTPClient.swift
//
// Created by onsissond.
//
import Moya
public typealias HTTPClient = MoyaProvider<MultiTarget>
#if DEBUG
extension MoyaProvider {
public static var mock: MoyaProvider {
MoyaProvider(
endpointClosure: MoyaProvider.defaultEndpointMapping,
stubClosure: MoyaProvider.immediatelyStub
)
}
}
#endif
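// Illustrative usage sketch: in DEBUG builds the stubbed provider above can stand in
// for a real client, e.g. when wiring up `StoriesFactory.Dependency`:
//
//     let httpClient: HTTPClient = .mock   // immediately replies with Moya sample data
//
// A production client would instead be built with `MoyaProvider`'s regular initializer.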
<file_sep>/Stories/UI/StoriesPreviewViewController/UI/StoriesPreviewViewController.swift
//
// Created by onsissond.
//
import UIKit
import RxSwift
import ComposableArchitecture
enum StoriesPreviewCellIdentifier: String {
case regular
case future
}
class StoriesPreviewViewController: UIViewController {
private let _store: StoriesPreviewSystem.LocalStore
private let _disposeBag = DisposeBag()
private lazy var _viewStore = ViewStore(_store)
private weak var _presentedVC: UIViewController?
private lazy var _collectionView: UICollectionView = {
$0.contentInset = .init(top: 4, left: 16, bottom: 4, right: 16)
$0.showsHorizontalScrollIndicator = false
$0.register(
StoryPreviewCell.self,
forCellWithReuseIdentifier: StoriesPreviewCellIdentifier.regular.rawValue
)
$0.register(
FutureStoryPreviewCell.self,
forCellWithReuseIdentifier: StoriesPreviewCellIdentifier.future.rawValue
)
$0.delegate = self
$0.dataSource = self
return $0
}(UICollectionView(frame: .zero, collectionViewLayout: _flowLayout))
private lazy var _flowLayout: UICollectionViewFlowLayout = {
$0.scrollDirection = .horizontal
return $0
}(UICollectionViewFlowLayout())
init(store: StoriesPreviewSystem.LocalStore) {
_store = store
super.init(nibName: nil, bundle: nil)
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
override func viewDidLoad() {
super.viewDidLoad()
_setupAppearence()
_setupSubviews()
_setupSubscriptions()
_viewStore.send(.viewDidLoad)
}
private func _setupSubscriptions() {
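        // The bindings below (1) reload the whole collection when the story list
        // changes, (2) reload only the "future story" cell when the subscription
        // state changes, (3) present the "turn on notifications" alert when a
        // failure state asks for it, and (4) present or dismiss the stories viewer
        // as `storiesState` becomes non-nil or nil.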
_viewStore.publisher.map(\.stories)
.distinctUntilChanged()
.bind(onNext: { [weak self] _ in
self?._collectionView.reloadData()
})
.disposed(by: _disposeBag)
_viewStore.publisher.map(\.subscriptionState)
.distinctUntilChanged()
.do(onNext: { [weak self] _ in
guard let self = self else { return }
if let row = self._store.state.dataSource.firstIndex(where: {
if case .future = $0 { return true }
return false
}) {
self._collectionView.reloadItems(
at: [.init(row: row, section: 0)]
)
}
})
.subscribe()
.disposed(by: _disposeBag)
_viewStore.publisher.map(\.subscriptionState)
.bind(onNext: { [weak self] state in
guard let self = self else { return }
switch state {
case .failure(_, let needShowAlert) where needShowAlert == true:
self._showTurnOnNotificationAlert()
default:
break
}
})
.disposed(by: _disposeBag)
_store.scope(
state: \.storiesState,
action: StoriesPreviewSystem.Action.storiesAction
).ifLet(then: { [weak self] in
let vc = StoriesViewController(store: $0)
self?._presentedVC = vc
self?.present(vc, animated: true)
}, else: { [weak self] in
self?._presentedVC?.dismiss(animated: true, completion: nil)
})
.disposed(by: _disposeBag)
}
private func _setupAppearence() {
_collectionView.backgroundColor = .clear
view.backgroundColor = .white
}
private func _setupSubviews() {
view.addSubview(_collectionView)
_collectionView.translatesAutoresizingMaskIntoConstraints = false
NSLayoutConstraint.activate([
_collectionView.topAnchor.constraint(equalTo: view.safeAreaLayoutGuide.topAnchor),
_collectionView.leadingAnchor.constraint(equalTo: view.leadingAnchor),
_collectionView.trailingAnchor.constraint(equalTo: view.trailingAnchor),
_collectionView.heightAnchor.constraint(equalToConstant: 150)
])
}
private func _showTurnOnNotificationAlert() {
present(
AlertBuilder()
.with(title: L10n.Alert.TurnOnNotifications.title)
.with(message: L10n.Alert.TurnOnNotifications.message)
.withAction(title: L10n.Alert.TurnOnNotifications.Button.cancel)
.withAction(
title: L10n.Alert.TurnOnNotifications.Button.ok,
isPreffered: true
) { [weak self] in
self?._viewStore.send(.openSettings)
}
.build(),
animated: true
)
}
}
extension StoriesPreviewViewController: UICollectionViewDataSource {
func numberOfSections(in collectionView: UICollectionView) -> Int {
1
}
func collectionView(
_ collectionView: UICollectionView,
numberOfItemsInSection section: Int
) -> Int {
_store.state.dataSource.count
}
}
extension StoriesPreviewViewController: UICollectionViewDelegate {
func collectionView(
_ collectionView: UICollectionView,
didSelectItemAt indexPath: IndexPath
) {
switch _store.state.dataSource[indexPath.row] {
case .story:
_viewStore.send(.openStories(index: indexPath.row))
case .future:
_viewStore.send(.switchNotifications)
}
}
}
extension StoriesPreviewViewController: UICollectionViewDelegateFlowLayout {
func collectionView(
_ collectionView: UICollectionView,
cellForItemAt indexPath: IndexPath
) -> UICollectionViewCell {
switch _store.state.dataSource[indexPath.row] {
case .story(let story):
let cell = collectionView.dequeueReusableCell(
withReuseIdentifier: StoriesPreviewCellIdentifier.regular.rawValue,
for: indexPath
)
if let cell = cell as? StoryPreviewCell {
cell.render(viewState: .init(storyPreview: story.preview))
}
return cell
case .future(let story):
let cell = collectionView.dequeueReusableCell(
withReuseIdentifier: StoriesPreviewCellIdentifier.future.rawValue,
for: indexPath
)
if let cell = cell as? FutureStoryPreviewCell {
cell.render(viewState: .init(
futureStory: story,
subscriptionState: _store.state.subscriptionState
))
}
return cell
}
}
func collectionView(
_ collectionView: UICollectionView,
layout collectionViewLayout: UICollectionViewLayout,
sizeForItemAt indexPath: IndexPath
) -> CGSize {
CGSize(
width: collectionView.contentSize.height * 0.8,
height: collectionView.contentSize.height
)
}
func collectionView(
_ collectionView: UICollectionView,
layout collectionViewLayout: UICollectionViewLayout,
insetForSectionAt section: Int
) -> UIEdgeInsets {
.zero
}
func collectionView(
_ collectionView: UICollectionView,
layout collectionViewLayout: UICollectionViewLayout,
minimumLineSpacingForSectionAt section: Int
) -> CGFloat {
8
}
func collectionView(
_ collectionView: UICollectionView,
layout collectionViewLayout: UICollectionViewLayout,
minimumInteritemSpacingForSectionAt section: Int
) -> CGFloat {
0
}
}
private extension FutureStoryPreviewCell.ViewState {
init(
futureStory: FutureStory,
subscriptionState: StoriesPreviewSystem.SubscriptionState
) {
self.init(
imageURL: futureStory.imageURL,
subscriptionState: .init(subscriptionState: subscriptionState),
daysToFutureStory: futureStory.daysToFutureStory
)
}
}
private extension FutureStoryPreviewCell.ViewState.SubscriptionState {
init(subscriptionState: StoriesPreviewSystem.SubscriptionState) {
switch subscriptionState {
case .failure: self = .failure
case .off: self = .off
case .on: self = .on
}
}
}
private extension StoryPreviewCell.ViewState {
init(storyPreview: RegularStoryPreview) {
self.init(
imageURL: storyPreview.imageURL,
title: storyPreview.title,
description: storyPreview.description
)
}
}
<file_sep>/Stories/UI/StoryContentView/SummaryContentStoryView/SumeryInfoStoryView.swift
//
// Created by onsissond.
//
import UIKit
final class SummaryInfoStoryView: UIView {
private lazy var _contentView: UIView = {
$0.layer.cornerRadius = 10
$0.layer.masksToBounds = true
$0.backgroundColor = UIColor.black.withAlphaComponent(0.6)
return $0
}(UIView())
private lazy var _stackView: UIStackView = {
$0.axis = .vertical
$0.spacing = 8
return $0
}(UIStackView())
private lazy var _infoView = SummaryInfoDetailsStoryView()
private lazy var _extraInfoView = SummaryInfoDetailsStoryView()
init() {
super.init(frame: .zero)
_setupSubviews()
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
private func _setupSubviews() {
addSubview(_contentView)
_contentView.translatesAutoresizingMaskIntoConstraints = false
NSLayoutConstraint.activate([
_contentView.topAnchor.constraint(equalTo: topAnchor),
_contentView.leadingAnchor.constraint(equalTo: leadingAnchor),
_contentView.trailingAnchor.constraint(equalTo: trailingAnchor),
_contentView.bottomAnchor.constraint(equalTo: bottomAnchor)
])
_contentView.addSubview(_stackView)
_stackView.translatesAutoresizingMaskIntoConstraints = false
NSLayoutConstraint.activate([
_stackView.topAnchor.constraint(equalTo: topAnchor, constant: 16),
_stackView.leadingAnchor.constraint(equalTo: leadingAnchor, constant: 16),
_stackView.trailingAnchor.constraint(equalTo: trailingAnchor, constant: -16),
_stackView.bottomAnchor.constraint(equalTo: bottomAnchor, constant: -16)
])
_stackView.addArrangedSubview(_infoView)
_stackView.addArrangedSubview(_extraInfoView)
}
}
extension SummaryInfoStoryView {
enum ViewState {
case trip(from: SummaryStoryContent.TripInfo, to: SummaryStoryContent.TripInfo)
case accommodation(title: String, description: String)
case nutrition(title: String, description: String)
case entertainment(title: String, description: String)
}
func render(viewState: ViewState) {
switch viewState {
case let .trip(tripFrom, tripTo):
_infoView.render(viewState: .init(
icon: tripTo.transport.icon,
title: tripTo.title,
subtitle: tripTo.subtitle
))
let needIcon = tripTo.transport != tripFrom.transport
_extraInfoView.render(viewState: .init(
icon: needIcon ? tripFrom.transport.icon : nil,
title: tripFrom.title,
subtitle: tripFrom.subtitle
))
_extraInfoView.isHidden = false
case let .accommodation(title, description),
let .nutrition(title, description),
let .entertainment(title, description):
_infoView.render(viewState: .init(
icon: viewState.icon,
title: title,
subtitle: description
))
_extraInfoView.isHidden = true
}
}
}
private extension Transport {
var icon: UIImage {
switch self {
case .avia:
return Asset.Icon.flight.image
case .train:
return Asset.Icon.train.image
case .bus:
return Asset.Icon.bus.image
}
}
}
private extension SummaryInfoStoryView.ViewState {
var icon: UIImage? {
switch self {
case .trip:
return nil
case .accommodation:
return Asset.Icon.accommodation.image
case .nutrition:
return Asset.Icon.nutrition.image
case .entertainment:
return Asset.Icon.entertainment.image
}
}
}
<file_sep>/Stories/Core/Domain/Story.swift
//
// Created by onsissond.
//
import UIKit
struct Story: Equatable {
var id: String
var preview: RegularStoryPreview
var content: [StoryContent]
var publishDate: Date
var expireDate: Date
}
// MARK: - Decodable
extension Story: Decodable {
enum CodingKeys: String, CodingKey {
case id
case pages
case publishDate
case expireDate
}
init(from decoder: Decoder) throws {
let container = try decoder.container(keyedBy: CodingKeys.self)
id = try container.decode(String.self, forKey: .id)
preview = try RegularStoryPreview(from: decoder)
content = try container.decode([StoryContent].self, forKey: .pages)
let publishDateString = try container.decode(String.self, forKey: .publishDate)
let expireDateString = try container.decode(String.self, forKey: .expireDate)
let dateFormatter = DateFormatter.plusThreeDateFormatter
publishDate = dateFormatter.date(from: publishDateString)!
expireDate = dateFormatter.date(from: expireDateString)!
}
}
/// Helper functions for date formatting.
extension DateFormatter {
    /// A DateFormatter configured with the "yyyy-MM-dd" format and the +3 (GMT+3) time zone.
public static var plusThreeDateFormatter: DateFormatter {
let formatter = DateFormatter()
formatter.timeZone = TimeZone(secondsFromGMT: 60*60*3)!
formatter.dateFormat = "yyyy-MM-dd"
return formatter
}
}
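// Illustrative usage sketch: this is the formatter `Story.init(from:)` relies on for
// its date fields, e.g.
//
//     // parses to midnight June 15 2021 in the GMT+3 time zone ("2021-06-15" is an arbitrary sample value)
//     let date = DateFormatter.plusThreeDateFormatter.date(from: "2021-06-15")
//
// Note that a fresh DateFormatter is created on every access of the computed property.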
<file_sep>/Stories/UI/StoriesPreviewViewController/UI/StoryPreviewCell.swift
//
// Created by onsissond.
//
import UIKit
import Kingfisher
final class StoryPreviewCell: UICollectionViewCell {
private lazy var _contentView: UIView = {
$0.backgroundColor = UIColor(hex: 0xAFD4FF)
$0.layer.cornerRadius = 10
$0.layer.masksToBounds = true
return $0
}(UIView())
private lazy var _imageView: UIImageView = {
$0.contentMode = .scaleAspectFill
$0.layer.cornerRadius = 10
$0.layer.masksToBounds = true
return $0
}(UIImageView())
private lazy var _gradientLayer: CAGradientLayer = .storyGradient
private lazy var _titleLabel: UILabel = {
$0.textColor = .white
$0.numberOfLines = 0
$0.font = .preferredFont(forTextStyle: .caption1)
return $0
}(UILabel())
private lazy var _descriptionLabel: UILabel = {
$0.textColor = .white
$0.numberOfLines = 0
$0.font = .systemFont(ofSize: 10)
return $0
}(UILabel())
override init(frame: CGRect) {
super.init(frame: frame)
_setupSubviews()
_setupAppearence()
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
override func layoutSubviews() {
super.layoutSubviews()
_gradientLayer.frame = contentView.frame
}
private func _setupAppearence() {
_imageView.layer.addSublayer(_gradientLayer)
}
private func _setupSubviews() {
contentView.addSubview(_contentView)
_contentView.translatesAutoresizingMaskIntoConstraints = false
NSLayoutConstraint.activate([
_contentView.topAnchor.constraint(equalTo: contentView.topAnchor),
_contentView.bottomAnchor.constraint(equalTo: contentView.bottomAnchor),
_contentView.leftAnchor.constraint(equalTo: contentView.leftAnchor),
_contentView.rightAnchor.constraint(equalTo: contentView.rightAnchor)
])
_contentView.addSubview(_imageView)
_imageView.translatesAutoresizingMaskIntoConstraints = false
NSLayoutConstraint.activate([
_imageView.topAnchor.constraint(equalTo: _contentView.topAnchor, constant: 2),
_imageView.leadingAnchor.constraint(equalTo: _contentView.leadingAnchor, constant: 2),
_imageView.trailingAnchor.constraint(equalTo: _contentView.trailingAnchor, constant: -2),
_imageView.bottomAnchor.constraint(equalTo: _contentView.bottomAnchor, constant: -2)
])
_contentView.addSubview(_titleLabel)
_titleLabel.translatesAutoresizingMaskIntoConstraints = false
NSLayoutConstraint.activate([
_titleLabel.leadingAnchor.constraint(equalTo: _contentView.leadingAnchor, constant: 8),
_titleLabel.trailingAnchor.constraint(equalTo: _contentView.trailingAnchor, constant: -8)
])
_contentView.addSubview(_descriptionLabel)
_descriptionLabel.translatesAutoresizingMaskIntoConstraints = false
NSLayoutConstraint.activate([
_descriptionLabel.topAnchor.constraint(equalTo: _titleLabel.bottomAnchor),
_descriptionLabel.leadingAnchor.constraint(equalTo: _contentView.leadingAnchor, constant: 8),
_descriptionLabel.trailingAnchor.constraint(equalTo: _contentView.trailingAnchor, constant: -8),
_descriptionLabel.bottomAnchor.constraint(equalTo: _contentView.bottomAnchor, constant: -8)
])
}
}
extension StoryPreviewCell {
struct ViewState {
let imageURL: URL
let title: String
let description: String?
}
func render(viewState: ViewState) {
_imageView.kf.setImage(
with: viewState.imageURL,
options: [.transition(.fade(0.5))]
)
_titleLabel.text = viewState.title
_descriptionLabel.text = viewState.description
}
}
<file_sep>/Stories/UI/StoriesPreviewViewController/Entity/FutureStory.swift
//
// Created by onsissond.
//
import Foundation
struct FutureStory: Equatable {
var imageURL: URL
var daysToFutureStory: Int
}
<file_sep>/Stories/RxOptional/AlertBuilder.swift
//
// Created by onsissond.
//
import UIKit
public enum AlertType {
case alert
case actionSheet
var prefferedStyle: UIAlertController.Style {
switch self {
case .alert:
return .alert
case .actionSheet:
return .actionSheet
}
}
}
public final class AlertBuilder {
private let _alert: UIAlertController
public init(_ alertType: AlertType = .alert) {
_alert = UIAlertController(
title: "",
message: nil,
preferredStyle: alertType.prefferedStyle
)
}
public func with(title: String) -> AlertBuilder {
_alert.title = title
return self
}
public func with(message: String) -> AlertBuilder {
_alert.message = message
return self
}
public func withAction(title: String, isPreffered: Bool = false, action: (() -> Void)? = nil) -> AlertBuilder {
let newAlertAction = UIAlertAction(title: title, style: .default) { _ in action?() }
_alert.addAction(newAlertAction)
if isPreffered {
_alert.preferredAction = newAlertAction
}
return self
}
public func withDestructive(title: String, action: (() -> Void)? = nil) -> AlertBuilder {
let newAlertAction = UIAlertAction(title: title, style: .destructive) { _ in action?() }
_alert.addAction(newAlertAction)
return self
}
public func withCancel(title: String, action: (() -> Void)? = nil) -> AlertBuilder {
let newAlertAction = UIAlertAction(title: title, style: .cancel) { _ in action?() }
_alert.addAction(newAlertAction)
return self
}
public func build() -> UIAlertController {
_alert
}
}
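// Illustrative usage sketch, mirroring how StoriesPreviewViewController builds its
// "turn on notifications" alert; the titles and the `openSettings()` call below are
// hypothetical placeholders rather than real localization keys or APIs:
//
//     let alert = AlertBuilder()
//         .with(title: "Turn on notifications")
//         .with(message: "Allow notifications to hear about new stories.")
//         .withCancel(title: "Cancel")
//         .withAction(title: "Open Settings", isPreffered: true) { openSettings() }
//         .build()
//     present(alert, animated: true)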
<file_sep>/Stories/Foundation/Date+mock.swift
//
// Created by onsissond.
//
import Foundation
extension Date {
static var mock: Date {
Date(timeIntervalSinceReferenceDate: 0)
}
static func mock(
_ stringValue: String,
dateFormat: String = "yyyy-MM-dd HH:mm:ss",
timeZone: TimeZone = .current
) -> Date? {
let formatter: DateFormatter = {
$0.dateFormat = dateFormat
$0.timeZone = timeZone
return $0
}(DateFormatter())
return formatter.date(from: stringValue)
}
}
<file_sep>/Stories/UI/StoryContentView/SummaryContentStoryView/SummaryInfoDetailsStoryView.swift
//
// Created by onsissond.
//
import UIKit
final class SummaryInfoDetailsStoryView: UIView {
private lazy var _iconView: UIImageView = {
$0.contentMode = .center
$0.setContentHuggingPriority(.required, for: .horizontal)
return $0
}(UIImageView())
private lazy var _titleLabel: UILabel = {
$0.font = .preferredFont(forTextStyle: .headline)
$0.textColor = .white
$0.numberOfLines = 0
$0.setContentHuggingPriority(.required, for: .vertical)
return $0
}(UILabel())
private lazy var _subtitleLabel: UILabel = {
$0.font = .systemFont(ofSize: 12)
$0.textColor = .lightGray
$0.numberOfLines = 0
$0.setContentHuggingPriority(.required, for: .vertical)
return $0
}(UILabel())
init() {
super.init(frame: .zero)
_setupSubviews()
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
private func _setupSubviews() {
addSubview(_iconView)
_iconView.translatesAutoresizingMaskIntoConstraints = false
NSLayoutConstraint.activate([
_iconView.topAnchor.constraint(equalTo: topAnchor),
_iconView.leadingAnchor.constraint(equalTo: leadingAnchor),
_iconView.widthAnchor.constraint(equalToConstant: 24),
_iconView.heightAnchor.constraint(equalToConstant: 24)
])
addSubview(_titleLabel)
_titleLabel.translatesAutoresizingMaskIntoConstraints = false
NSLayoutConstraint.activate([
_titleLabel.topAnchor.constraint(equalTo: topAnchor),
_titleLabel.leadingAnchor.constraint(equalTo: leadingAnchor, constant: 32),
_titleLabel.trailingAnchor.constraint(equalTo: trailingAnchor)
])
addSubview(_subtitleLabel)
_subtitleLabel.translatesAutoresizingMaskIntoConstraints = false
NSLayoutConstraint.activate([
_subtitleLabel.topAnchor.constraint(equalTo: _titleLabel.bottomAnchor),
_subtitleLabel.leadingAnchor.constraint(equalTo: leadingAnchor, constant: 32),
_subtitleLabel.trailingAnchor.constraint(equalTo: trailingAnchor),
_subtitleLabel.bottomAnchor.constraint(equalTo: bottomAnchor)
])
}
}
extension SummaryInfoDetailsStoryView {
struct ViewState {
var icon: UIImage?
var title: String
var subtitle: String
}
func render(viewState: ViewState) {
_iconView.image = viewState.icon
_titleLabel.text = viewState.title
_subtitleLabel.text = viewState.subtitle
}
}
<file_sep>/Stories/UI/StoriesFactory.swift
//
// Created by onsissond.
//
import RxSwift
import UIKit
public enum StoriesFactory {
public struct Dependency {
var httpClient: HTTPClient
var dateProvider: () -> Date
var openURL: (URL) -> Void
public init(
httpClient: HTTPClient,
dateProvider: @escaping () -> Date,
openURL: @escaping (URL) -> Void
) {
self.httpClient = httpClient
self.dateProvider = dateProvider
self.openURL = openURL
}
}
public static func create(
dependency: Dependency,
payload: Void
) -> UIViewController {
StoriesPreviewViewController(
store: StoriesPreviewSystem.LocalStore(
initialState: .init(),
reducer: StoriesPreviewSystem.reducer,
environment: StoriesPreviewSystem.Environment(
dependency: dependency
)
)
)
}
}
private extension StoriesPreviewSystem.Environment {
init(
dependency: StoriesFactory.Dependency
) {
let storiesService = StoriesService.live
let fetchStories = storiesService.fetchStories(dependency.httpClient)
self.init(
fetchStories: {
fetchStories(dependency.dateProvider())
.asObservable()
.eraseToEffect()
},
currentDate: dependency.dateProvider,
uuid: UUID.init,
calendar: { Calendar.current },
notificationService: .live,
storiesEnvironment: .init(
storyEnvironment: .init(
openURL: dependency.openURL
)
)
)
}
}
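// Illustrative usage sketch of the composition root; `.mock` is the DEBUG-only stubbed
// client from HTTPClient.swift, and the `openURL` closure shown is just one possible
// implementation:
//
//     let storiesViewController = StoriesFactory.create(
//         dependency: .init(
//             httpClient: .mock,
//             dateProvider: Date.init,
//             openURL: { UIApplication.shared.open($0) }
//         ),
//         payload: ()
//     )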
<file_sep>/StoriesAppTests/StoriesAppTests.swift
//
// Created by onsissond.
//
import XCTest
@testable import StoriesApp
class StoriesAppTests: XCTestCase {
func testExample() {}
}
| 03fb4f86fc8defa0194e0566dccc141da1097980 | [
"Swift",
"Markdown"
] | 44 | Swift | onsissond/Stories | 4d2e95495e02efe0f96ba60fcc4f93c0d8fbe5f4 | 71b35f87bd3d2e8bfd0071b60f3a9d86fd1032ae | |
refs/heads/master | <file_sep>app.controller('MainPageController', ['$scope','PhotosService', function($scope, photosService) {
var vm = this;
vm.title = 'AngularJS Exercises';
vm.selectedAlbum = -1;
vm.albums = [];
vm.photos = [];
vm.number = 1234.567;
vm.albumClick = function(albumId) {
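        // Toggles the album selection: clicking the currently selected album collapses
        // it, clicking a different album expands it. Note that the photos request is
        // issued on every click, including the collapsing one.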
photosService.getAlbum(albumId).then(function(photos) {
vm.photos = photos;
});
if (albumId === vm.selectedAlbum) {
vm.selectedAlbum = -1;
}
else {
vm.selectedAlbum = albumId;
}
}
photosService.getAllAlbums().then(function(albums) {
vm.albums = albums;
});
}]);<file_sep>var app = angular.module('angular-exercises', ['ngSanitize']);
<file_sep>app.factory('PhotosService',['$http','$q', function($http,$q) {
var serviceFactory = {};
var photosUrl = 'https://jsonplaceholder.typicode.com/photos';
serviceFactory.getAllAlbums = function() {
var albumsPromise = $http.get(photosUrl).then(function(response) {
var albums = [];
var currentAlbumId = 0;
if (typeof response === 'object') {
_.forEach(response.data, function(album) {
if (!_.find(albums, { 'albumId' : album.albumId })) {
albums.push({
albumTitle: 'Album: ' + album.albumId,
albumId: album.albumId,
albumThumbnailUrl: album.thumbnailUrl
});
}
}, this);
}
return albums;
},
function (error) {
            return $q.reject(error.data);
});
return albumsPromise;
}
serviceFactory.getAlbum = function(albumId) {
var getAlbumUrl = photosUrl + '?albumId=' + albumId;
var albumPromise = $http.get(getAlbumUrl).then(function(response) {
if (typeof response === 'object') {
return response.data;
}
else {
return $q.reject('Data error');
}
},
function (response) {
return $q.reject(response.data);
});
return albumPromise;
}
return serviceFactory;
}]);<file_sep>app.filter('strange', ['$sce', function($sce) {
function getFract(value) {
var splited = value.toString().split('.');
if (_.isArray(splited) && splited.length === 2) {
return _.toInteger(splited[1]);
}
}
return function(value, precision) {
var result = '';
var splited = value.toString().split('.');
if (_.isArray(splited) && splited.length === 2) {
var fract = _.toNumber('0.' + splited[1]);
if (precision) {
fract = _.round(fract, precision);
}
result = splited[0] + '<sup>' + getFract(fract) +'</sup>';
}
return $sce.trustAsHtml(result);
}
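    // Illustrative usage sketch: because the filter returns HTML wrapped in
    // $sce.trustAsHtml, it is meant for ng-bind-html rather than plain interpolation,
    // e.g. (assuming a "main" controllerAs alias, which is hypothetical here):
    //
    //     <span ng-bind-html="main.number | strange:2"></span>
    //
    // which renders 1234.567 as 1234<sup>57</sup>.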
}]);<file_sep>app.directive('displayPhoto', [function() {
var controller = ['$scope', function($scope) {
$scope.bigImageVisible = false;
$scope.imageClicked = function() {
$scope.bigImageVisible = true;
}
$scope.closeBigPhoto = function() {
$scope.bigImageVisible = false;
}
}];
return {
restrict: 'E',
scope: {
photo: '='
},
templateUrl: 'app/photo.directive.html',
controller: controller
};
}]); | b7de44260ca61d37def4335b7f33206fe84d4925 | [
"JavaScript"
] | 5 | JavaScript | radoslawmarek/angular-exercises | 77fef8217fe5e07d90e7f77ff45fe9525fabc22c | b041667d5e5eed5f566c8b5b69533460cbdc2f78 | |
refs/heads/master | <file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="MetricReader.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Metrics
{
using System;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Threading.Tasks;
using System.Web;
using Microsoft.Cloud.Metrics.Client.Query;
using Microsoft.Cloud.Metrics.Client.Utility;
using Microsoft.Online.Metrics.Serialization;
using Microsoft.Online.Metrics.Serialization.Configuration;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
/// <summary>
/// The metrics reader class to read metrics data as well as metrics metadata.
/// </summary>
public sealed class MetricReader : IMetricReader
{
/// <summary>
/// When a dimension has this value, it is not a pre-condition but it is part of the pre-aggregation.
/// </summary>
public const string ContextualHintingWildcardValue = "{{*}}";
#pragma warning disable SA1401 // Fields must be private
/// <summary>
/// The relative URL for metrics data
/// </summary>
public readonly string DataRelativeUrl;
/// <summary>
/// The relative URL for metrics meta-data a.k.a. hinting data
/// </summary>
public readonly string MetaDataRelativeUrl;
/// <summary>
/// The relative URL for metrics meta-data a.k.a. hinting data V2.
/// </summary>
public readonly string MetaDataRelativeUrlV2;
/// <summary>
/// The relative Url for distributed query.
/// </summary>
public readonly string DistributedQueryRelativeUrl;
/// <summary>
/// The query service relative URL.
/// </summary>
public readonly string QueryServiceRelativeUrl;
#pragma warning restore SA1401 // Fields must be private
private const int MillisecondsPerMinute = 60000;
/// <summary>
/// The empty string array.
/// </summary>
private static readonly string[] EmptyStringArray = new string[0];
/// <summary>
/// The empty pre-aggregate configurations.
/// </summary>
private static readonly List<PreAggregateConfiguration> EmptyPreAggregateConfigurations = new List<PreAggregateConfiguration>();
/// <summary>
/// The HTTP client instance.
/// </summary>
private readonly HttpClient httpClient;
/// <summary>
/// The connection information.
/// </summary>
private readonly ConnectionInfo connectionInfo;
/// <summary>
/// The metric configuration manager.
/// </summary>
private readonly MetricConfigurationManager metricConfigurationManager;
/// <summary>
/// The string identifying client.
/// </summary>
private readonly string clientId;
/// <summary>
/// Initializes a new instance of the <see cref="MetricReader"/> class.
/// </summary>
/// <param name="connectionInfo">The connection information.</param>
/// <param name="clientId">The string identifying client.</param>
public MetricReader(ConnectionInfo connectionInfo, string clientId = "ClientAPI")
: this(connectionInfo, HttpClientHelper.CreateHttpClientWithAuthInfo(connectionInfo), clientId)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="MetricReader"/> class.
/// </summary>
/// <param name="connectionInfo">The connection information.</param>
/// <param name="authHeaderValue">The auth header.</param>
/// <param name="clientId">The string identifying client.</param>
public MetricReader(ConnectionInfo connectionInfo, string authHeaderValue, string clientId = "ClientAPI")
: this(connectionInfo, HttpClientHelper.CreateHttpClientWithAuthInfo(connectionInfo, authHeaderValue), clientId)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="MetricReader"/> class.
/// </summary>
/// <param name="connectionInfo">The connection information.</param>
/// <param name="httpClient">The http client with auth info.</param>
/// <param name="clientId">The string identifying client.</param>
internal MetricReader(ConnectionInfo connectionInfo, HttpClient httpClient, string clientId = "ClientAPI")
{
if (connectionInfo == null)
{
throw new ArgumentNullException("connectionInfo");
}
this.connectionInfo = connectionInfo;
this.DataRelativeUrl = this.connectionInfo.GetAuthRelativeUrl(MetricsServerRelativeUris.DataRelativeUrl);
this.MetaDataRelativeUrl = this.connectionInfo.GetAuthRelativeUrl(MetricsServerRelativeUris.MetaDataRelativeUrl);
this.MetaDataRelativeUrlV2 = this.connectionInfo.GetAuthRelativeUrl(MetricsServerRelativeUris.MetaDataRelativeUrlV2);
this.DistributedQueryRelativeUrl = this.connectionInfo.GetAuthRelativeUrl(MetricsServerRelativeUris.DistributedQueryRelativeUrl);
this.QueryServiceRelativeUrl = this.connectionInfo.GetAuthRelativeUrl(MetricsServerRelativeUris.QueryServiceRelativeUrl);
this.clientId = clientId;
this.httpClient = httpClient;
this.metricConfigurationManager = new MetricConfigurationManager(this.connectionInfo);
}
/// <summary>
/// Gets the time series.
/// </summary>
/// <param name="startTimeUtc">The start time UTC.</param>
/// <param name="endTimeUtc">The end time UTC.</param>
/// <param name="samplingType">The sampling type.</param>
/// <param name="definition">The time series definition.</param>
/// <returns>
        /// The time series for the given <paramref name="definition"/>.
/// </returns>
public async Task<TimeSeries<MetricIdentifier, double?>> GetTimeSeriesAsync(
DateTime startTimeUtc,
DateTime endTimeUtc,
SamplingType samplingType,
TimeSeriesDefinition<MetricIdentifier> definition)
{
if (definition == null)
{
throw new ArgumentNullException("definition");
}
var series = await this.GetMultipleTimeSeriesAsync(startTimeUtc, endTimeUtc, samplingType, definition).ConfigureAwait(false);
return series.First();
}
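        // Illustrative usage sketch for this reader. The MetricIdentifier constructor
        // shape, SamplingType.Sum and the dimension shown below are assumptions for the
        // sake of the example, and "connectionInfo" stands in for an already-built
        // ConnectionInfo:
        //
        //   var reader = new MetricReader(connectionInfo);
        //   var id = new MetricIdentifier("MyAccount", "MyNamespace", "MyMetric");
        //   var definition = new TimeSeriesDefinition<MetricIdentifier>(
        //       id,
        //       new[] { new KeyValuePair<string, string>("Datacenter", "EastUS") });
        //   var series = await reader.GetTimeSeriesAsync(
        //       DateTime.UtcNow.AddDays(-1), DateTime.UtcNow, SamplingType.Sum, definition);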
/// <summary>
/// Gets a list of the time series.
/// </summary>
/// <param name="startTimeUtc">The start time UTC.</param>
/// <param name="endTimeUtc">The end time UTC.</param>
/// <param name="samplingType">The sampling type.</param>
/// <param name="definitions">The time series definitions.</param>
/// <returns>
        /// The time series for the given <paramref name="definitions" />.
/// </returns>
public Task<IReadOnlyList<TimeSeries<MetricIdentifier, double?>>> GetMultipleTimeSeriesAsync(
DateTime startTimeUtc,
DateTime endTimeUtc,
SamplingType samplingType,
params TimeSeriesDefinition<MetricIdentifier>[] definitions)
{
return this.GetMultipleTimeSeriesAsync(startTimeUtc, endTimeUtc, samplingType, definitions.AsEnumerable());
}
/// <summary>
/// Gets a list of the time series.
/// </summary>
/// <param name="startTimeUtc">The start time UTC.</param>
/// <param name="endTimeUtc">The end time UTC.</param>
/// <param name="samplingType">The sampling type.</param>
/// <param name="definitions">The time series definitions.</param>
/// <returns>
        /// The time series for the given <paramref name="definitions" />.
/// </returns>
public Task<IReadOnlyList<TimeSeries<MetricIdentifier, double?>>> GetMultipleTimeSeriesAsync(
DateTime startTimeUtc,
DateTime endTimeUtc,
SamplingType samplingType,
IEnumerable<TimeSeriesDefinition<MetricIdentifier>> definitions)
{
return this.GetMultipleTimeSeriesAsync(startTimeUtc, endTimeUtc, new[] { samplingType }, definitions);
}
/// <summary>
/// Gets the time series.
/// </summary>
/// <param name="startTimeUtc">The start time UTC.</param>
/// <param name="endTimeUtc">The end time UTC.</param>
/// <param name="samplingType">The sampling type.</param>
/// <param name="seriesResolutionInMinutes">The resolution window used to reduce the resolution of the returned series.</param>
/// <param name="definition">The time series definition.</param>
/// <returns>
        /// The time series for the given <paramref name="definition"/>.
/// </returns>
/// <remarks>This API uses <see cref="AggregationType.Automatic"/> by default and other overloads are available for specific <see cref="AggregationType"/>.</remarks>
public async Task<TimeSeries<MetricIdentifier, double?>> GetTimeSeriesAsync(
DateTime startTimeUtc,
DateTime endTimeUtc,
SamplingType samplingType,
int seriesResolutionInMinutes,
TimeSeriesDefinition<MetricIdentifier> definition)
{
if (definition == null)
{
throw new ArgumentNullException("definition");
}
var series = await this.GetMultipleTimeSeriesAsync(startTimeUtc, endTimeUtc, samplingType, seriesResolutionInMinutes, definition).ConfigureAwait(false);
return series.First();
}
/// <summary>
/// Gets a list of the time series.
/// </summary>
/// <param name="startTimeUtc">The start time UTC.</param>
/// <param name="endTimeUtc">The end time UTC.</param>
/// <param name="samplingType">The sampling type.</param>
/// <param name="seriesResolutionInMinutes">The resolution window used to reduce the resolution of the returned series.</param>
/// <param name="definitions">The time series definitions.</param>
/// <returns>
        /// The time series for the given <paramref name="definitions" />.
/// </returns>
/// <remarks>This API uses <see cref="AggregationType.Automatic"/> by default and other overloads are available for specific <see cref="AggregationType"/>.</remarks>
public Task<IReadOnlyList<TimeSeries<MetricIdentifier, double?>>> GetMultipleTimeSeriesAsync(
DateTime startTimeUtc,
DateTime endTimeUtc,
SamplingType samplingType,
int seriesResolutionInMinutes,
params TimeSeriesDefinition<MetricIdentifier>[] definitions)
{
return this.GetMultipleTimeSeriesAsync(startTimeUtc, endTimeUtc, samplingType, seriesResolutionInMinutes, definitions.AsEnumerable());
}
/// <summary>
/// Gets a list of the time series.
/// </summary>
/// <param name="startTimeUtc">The start time UTC.</param>
/// <param name="endTimeUtc">The end time UTC.</param>
/// <param name="samplingType">The sampling type.</param>
/// <param name="seriesResolutionInMinutes">The resolution window used to reduce the resolution of the returned series.</param>
/// <param name="definitions">The time series definitions.</param>
/// <returns>
        /// The time series for the given <paramref name="definitions" />.
/// </returns>
/// <remarks>This API uses <see cref="AggregationType.Automatic"/> by default and other overloads are available for specific <see cref="AggregationType"/>.</remarks>
public Task<IReadOnlyList<TimeSeries<MetricIdentifier, double?>>> GetMultipleTimeSeriesAsync(
DateTime startTimeUtc,
DateTime endTimeUtc,
SamplingType samplingType,
int seriesResolutionInMinutes,
IEnumerable<TimeSeriesDefinition<MetricIdentifier>> definitions)
{
return this.GetMultipleTimeSeriesAsync(startTimeUtc, endTimeUtc, new[] { samplingType }, definitions, seriesResolutionInMinutes);
}
/// <summary>
/// Gets time series with multiple sampling types.
/// </summary>
/// <param name="startTimeUtc">The start time UTC.</param>
/// <param name="endTimeUtc">The end time UTC.</param>
/// <param name="samplingTypes">The sampling types.</param>
/// <param name="definition">The time series definition.</param>
/// <param name="seriesResolutionInMinutes">The resolution window used to reduce the resolution of the returned series.</param>
/// <param name="aggregationType">The aggregation function used to reduce the resolution of the returned series.</param>
/// <returns>
/// The time series for the given <paramref name="definition" />.
/// </returns>
public async Task<TimeSeries<MetricIdentifier, double?>> GetTimeSeriesAsync(
DateTime startTimeUtc,
DateTime endTimeUtc,
SamplingType[] samplingTypes,
TimeSeriesDefinition<MetricIdentifier> definition,
int seriesResolutionInMinutes = 1,
AggregationType aggregationType = AggregationType.Automatic)
{
if (definition == null)
{
throw new ArgumentNullException(nameof(definition));
}
var series = await this.GetMultipleTimeSeriesAsync(startTimeUtc, endTimeUtc, samplingTypes, new[] { definition }, seriesResolutionInMinutes, aggregationType).ConfigureAwait(false);
return series.FirstOrDefault();
}
/// <summary>
/// Gets a list of the time series, each with multiple sampling types.
/// </summary>
/// <param name="startTimeUtc">The start time UTC.</param>
/// <param name="endTimeUtc">The end time UTC.</param>
/// <param name="samplingTypes">The sampling types.</param>
/// <param name="definitions">The time series definitions.</param>
/// <param name="seriesResolutionInMinutes">The resolution window used to reduce the resolution of the returned series.</param>
/// <param name="aggregationType">The aggregation function used to reduce the resolution of the returned series.</param>
/// <returns>
        /// The time series for the given <paramref name="definitions" />.
/// </returns>
public async Task<IReadOnlyList<TimeSeries<MetricIdentifier, double?>>> GetMultipleTimeSeriesAsync(
DateTime startTimeUtc,
DateTime endTimeUtc,
SamplingType[] samplingTypes,
IEnumerable<TimeSeriesDefinition<MetricIdentifier>> definitions,
int seriesResolutionInMinutes = 1,
AggregationType aggregationType = AggregationType.Automatic)
{
if (definitions == null)
{
throw new ArgumentNullException("definitions");
}
if (samplingTypes == null || samplingTypes.Length == 0)
{
throw new ArgumentException("cannot be null or empty", nameof(samplingTypes));
}
if (seriesResolutionInMinutes < SerializationConstants.DefaultSeriesResolutionInMinutes)
{
                throw new ArgumentException($"{seriesResolutionInMinutes} must be >= {SerializationConstants.DefaultSeriesResolutionInMinutes}", nameof(seriesResolutionInMinutes));
}
List<TimeSeriesDefinition<MetricIdentifier>> definitionList = definitions.ToList();
if (definitionList.Count == 0)
{
throw new ArgumentException("The count of 'definitions' is 0.");
}
if (definitionList.Any(d => d == null))
{
throw new ArgumentException("At least one of definitions are null.");
}
if (startTimeUtc > endTimeUtc)
{
throw new ArgumentException(string.Format("startTimeUtc [{0}] must be <= endTimeUtc [{1}]", startTimeUtc, endTimeUtc));
}
NormalizeTimeRange(ref startTimeUtc, ref endTimeUtc);
foreach (var timeSeriesDefinition in definitionList)
{
timeSeriesDefinition.SamplingTypes = samplingTypes;
timeSeriesDefinition.StartTimeUtc = startTimeUtc;
timeSeriesDefinition.EndTimeUtc = endTimeUtc;
timeSeriesDefinition.SeriesResolutionInMinutes = seriesResolutionInMinutes;
timeSeriesDefinition.AggregationType = aggregationType;
}
return await this.GetMultipleTimeSeriesAsync(definitionList).ConfigureAwait(false);
}
/// <summary>
/// Gets a list of the time series.
/// </summary>
/// <param name="definitions">The time series definitions.</param>
/// <returns>
        /// The time series for the given <paramref name="definitions"/>.
/// </returns>
/// <exception cref="MetricsClientException">
/// This exception is thrown on all failures in communication.
/// One should look at inner exception for more details.
/// </exception>
public async Task<IReadOnlyList<TimeSeries<MetricIdentifier, double?>>> GetMultipleTimeSeriesAsync(IList<TimeSeriesDefinition<MetricIdentifier>> definitions)
{
using (var response = await this.GetMultipleTimeSeriesAsync(definitions, MetricQueryResponseDeserializer.CurrentVersion, returnMetricNames: false).ConfigureAwait(false))
{
using (var stream = await response.Content.ReadAsStreamAsync().ConfigureAwait(false))
{
return MetricQueryResponseDeserializer.Deserialize(stream, definitions.ToArray()).Item2;
}
}
}
/// <summary>
/// Gets the list of namespaces for the <paramref name="monitoringAccount"/>.
/// </summary>
/// <param name="monitoringAccount">The monitoring account.</param>
/// <returns>The list of namespaces for the <paramref name="monitoringAccount"/>.</returns>
public async Task<IReadOnlyList<string>> GetNamespacesAsync(string monitoringAccount)
{
if (string.IsNullOrWhiteSpace(monitoringAccount))
{
throw new ArgumentException("monitoringAccount is null or empty.");
}
string url = string.Format(
"{0}{1}/monitoringAccount/{2}/metricNamespace",
this.connectionInfo.GetEndpoint(monitoringAccount),
this.MetaDataRelativeUrl,
monitoringAccount);
var response = await HttpClientHelper.GetResponse(
new Uri(url),
HttpMethod.Get,
this.httpClient,
monitoringAccount,
this.MetaDataRelativeUrl,
null,
this.clientId).ConfigureAwait(false);
return JsonConvert.DeserializeObject<string[]>(response.Item1);
}
/// <summary>
/// Gets the list of metric names for the <paramref name="monitoringAccount" /> and <paramref name="metricNamespace" />.
/// </summary>
/// <param name="monitoringAccount">The monitoring account.</param>
/// <param name="metricNamespace">The metric namespace.</param>
/// <returns>
/// The list of metric names for the <paramref name="monitoringAccount" /> and <paramref name="metricNamespace" />.
/// </returns>
public async Task<IReadOnlyList<string>> GetMetricNamesAsync(string monitoringAccount, string metricNamespace)
{
if (string.IsNullOrWhiteSpace(monitoringAccount))
{
throw new ArgumentException("monitoringAccount is null or empty.");
}
if (string.IsNullOrWhiteSpace(metricNamespace))
{
throw new ArgumentException("metricNamespace is null or empty.");
}
string url = string.Format(
"{0}{1}/monitoringAccount/{2}/metricNamespace/{3}/metric",
this.connectionInfo.GetEndpoint(monitoringAccount),
this.MetaDataRelativeUrl,
monitoringAccount,
SpecialCharsHelper.EscapeTwice(metricNamespace));
var response = await HttpClientHelper.GetResponse(
new Uri(url),
HttpMethod.Get,
this.httpClient,
monitoringAccount,
this.MetaDataRelativeUrl,
null,
this.clientId).ConfigureAwait(false);
return JsonConvert.DeserializeObject<string[]>(response.Item1);
}
/// <summary>
/// Gets the list of dimension names for the <paramref name="metricId" />.
/// </summary>
/// <param name="metricId">The metric identifier.</param>
/// <returns>
/// The list of dimension names for the <paramref name="metricId" />.
/// </returns>
public async Task<IReadOnlyList<string>> GetDimensionNamesAsync(MetricIdentifier metricId)
{
var config = await this.metricConfigurationManager.Get(metricId).ConfigureAwait(false);
return config.DimensionConfigurations == null ? EmptyStringArray : config.DimensionConfigurations.Select(d => d.Id).ToArray();
}
/// <summary>
/// Gets the list of pre-aggregate configurations for the <paramref name="metricId" />.
/// </summary>
/// <param name="metricId">The metric identifier.</param>
/// <returns>
/// The list of pre-aggregate configurations for the <paramref name="metricId" />.
/// </returns>
public async Task<IReadOnlyList<PreAggregateConfiguration>> GetPreAggregateConfigurationsAsync(MetricIdentifier metricId)
{
var config = await this.metricConfigurationManager.Get(metricId).ConfigureAwait(false);
return config.PreAggregations ?? EmptyPreAggregateConfigurations;
}
/// <summary>
/// Gets the known dimension combinations.
/// </summary>
/// <param name="metricId">The metric identifier.</param>
/// <param name="dimensionFilters">The dimension filters.</param>
/// <returns>Time series definitions with known dimension combinations.</returns>
public Task<IReadOnlyList<TimeSeriesDefinition<MetricIdentifier>>> GetKnownTimeSeriesDefinitionsAsync(
MetricIdentifier metricId,
params DimensionFilter[] dimensionFilters)
{
return this.GetKnownTimeSeriesDefinitionsAsync(metricId, dimensionFilters.AsEnumerable());
}
/// <summary>
/// Gets the known dimension combinations.
/// </summary>
/// <param name="metricId">The metric identifier.</param>
/// <param name="dimensionFilters">The dimension filters.</param>
/// <returns>Time series definitions with known dimension combinations.</returns>
public async Task<IReadOnlyList<TimeSeriesDefinition<MetricIdentifier>>> GetKnownTimeSeriesDefinitionsAsync(
MetricIdentifier metricId,
IEnumerable<DimensionFilter> dimensionFilters)
{
return await this.GetKnownTimeSeriesDefinitionsAsync(metricId, dimensionFilters, DateTime.MinValue, DateTime.MaxValue).ConfigureAwait(false);
}
/// <summary>
/// Gets the known dimension combinations.
/// </summary>
/// <param name="metricId">The metric identifier.</param>
/// <param name="dimensionFilters">The dimension filters.</param>
/// <param name="startTimeUtc">Start time for evaluating dimension values.</param>
/// <param name="endTimeUtc">End time for evaluating dimension values.</param>
/// <param name="newCombinationsOnly">
        /// If true, only combinations which were added into the hinting system's memory in this session after <paramref name="startTimeUtc"/> are requested.
        /// This flag does *not* guarantee that only new combinations will be returned;
        /// it is a hint to the hinting system to try to return only new combinations in the given time range.
/// </param>
/// <returns>Time series definitions with known dimension combinations.</returns>
public async Task<IReadOnlyList<TimeSeriesDefinition<MetricIdentifier>>> GetKnownTimeSeriesDefinitionsAsync(
MetricIdentifier metricId,
IEnumerable<DimensionFilter> dimensionFilters,
DateTime startTimeUtc,
DateTime endTimeUtc,
bool newCombinationsOnly = false)
{
metricId.Validate();
var dimensionNamesAndConstraints = GetDimensionNamesAndConstraints(dimensionFilters);
var url = string.Format(
"{0}{1}/monitoringAccount/{2}/metricNamespace/{3}/metricName/{4}/startTimeUtcMillis/{5}/endTimeUtcMillis/{6}",
this.connectionInfo.GetEndpoint(metricId.MonitoringAccount),
this.MetaDataRelativeUrl,
SpecialCharsHelper.EscapeTwice(metricId.MonitoringAccount),
SpecialCharsHelper.EscapeTwice(metricId.MetricNamespace),
SpecialCharsHelper.EscapeTwice(metricId.MetricName),
UnixEpochHelper.GetMillis(startTimeUtc),
UnixEpochHelper.GetMillis(endTimeUtc));
if (newCombinationsOnly)
{
url = string.Format("{0}/newOnly", url);
}
var response = await HttpClientHelper.GetResponse(
new Uri(url),
HttpMethod.Post,
this.httpClient,
metricId.MonitoringAccount,
this.MetaDataRelativeUrl,
dimensionNamesAndConstraints,
this.clientId).ConfigureAwait(false);
var deserializedResponse = JsonConvert.DeserializeObject<Tuple<List<string>, List<List<string>>>>(response.Item1);
TimeSeriesDefinition<MetricIdentifier>[] results;
if (deserializedResponse != null && deserializedResponse.Item1 != null && deserializedResponse.Item2 != null)
{
results = new TimeSeriesDefinition<MetricIdentifier>[deserializedResponse.Item2.Count];
                var preAggregateDimensionNames = deserializedResponse.Item1;
for (var index = 0; index < deserializedResponse.Item2.Count; index++)
{
var r = deserializedResponse.Item2[index];
var dimensionCombination = new KeyValuePair<string, string>[r.Count];
                    for (var i = 0; i < preAggregateDimensionNames.Count; i++)
{
                        dimensionCombination[i] = new KeyValuePair<string, string>(preAggregateDimensionNames[i], r[i]);
}
results[index] = new TimeSeriesDefinition<MetricIdentifier>(metricId, dimensionCombination);
}
}
else
{
results = new TimeSeriesDefinition<MetricIdentifier>[0];
}
return results;
}
/// <summary>
        /// Gets the dimension values for <paramref name="dimensionName"/> satisfying the <paramref name="dimensionFilters"/>
/// and time range (<paramref name="startTimeUtc"/>, <paramref name="endTimeUtc"/>)
/// </summary>
/// <remarks>
/// Time range resolution is day.
/// </remarks>
/// <param name="metricId">The metric identifier.</param>
/// <param name="dimensionFilters">
/// The dimension filters representing the pre-aggregate dimensions.
        /// Create an empty include filter for a dimension with no filter values.
        /// The requested dimension should also be part of this list, with an empty filter.
/// </param>
/// <param name="dimensionName">Name of the dimension for which values are requested.</param>
/// <param name="startTimeUtc">Start time for evaluating dimension values.</param>
/// <param name="endTimeUtc">End time for evaluating dimension values.</param>
/// <returns>Dimension values for <paramref name="dimensionName"/>.</returns>
public async Task<IReadOnlyList<string>> GetDimensionValuesAsync(
MetricIdentifier metricId,
List<DimensionFilter> dimensionFilters,
string dimensionName,
DateTime startTimeUtc,
DateTime endTimeUtc)
{
metricId.Validate();
if (dimensionFilters == null || dimensionFilters.Count == 0)
{
throw new ArgumentException("Dimension filters cannot be empty or null");
}
dimensionFilters.Sort((item1, item2) => string.Compare(item1.DimensionName, item2.DimensionName, StringComparison.OrdinalIgnoreCase));
var url = string.Format(
"{0}{1}/monitoringAccount/{2}/metricNamespace/{3}/metric/{4}/startTimeUtcMillis/{5}/endTimeUtcMillis/{6}/dimension/{7}",
this.connectionInfo.GetEndpoint(metricId.MonitoringAccount),
this.MetaDataRelativeUrlV2,
SpecialCharsHelper.EscapeTwice(metricId.MonitoringAccount),
SpecialCharsHelper.EscapeTwice(metricId.MetricNamespace),
SpecialCharsHelper.EscapeTwice(metricId.MetricName),
UnixEpochHelper.GetMillis(startTimeUtc),
UnixEpochHelper.GetMillis(endTimeUtc),
SpecialCharsHelper.EscapeTwice(dimensionName));
var response = await HttpClientHelper.GetResponse(
new Uri(url),
HttpMethod.Post,
this.httpClient,
metricId.MonitoringAccount,
this.MetaDataRelativeUrlV2,
null,
this.clientId,
JsonConvert.SerializeObject(dimensionFilters)).ConfigureAwait(false);
return JsonConvert.DeserializeObject<List<string>>(response.Item1);
}
/// <summary>
/// Gets the known dimension combinations that match the query criteria.
/// </summary>
/// <param name="metricId">The metric identifier.</param>
/// <param name="dimensionFilters">The dimension filters, used to indicate one or more possible dimension values.</param>
/// <param name="startTimeUtc">Start time for time series.</param>
/// <param name="endTimeUtc">End time for time series.</param>
/// <param name="samplingType">Sampling type to use for this metric.</param>
/// <param name="reducer">The reducing function to apply to the time series.</param>
/// <param name="queryFilter">Filter criteria to enforce on the query.</param>
/// <param name="includeSeries">Indicate whether or not to include the raw time series data in the result.</param>
/// <param name="selectionClause">Reduce result to top N results of the query.</param>
/// <param name="aggregationType">Aggregation function to use when reducing the resolution of the returned series.</param>
/// <param name="seriesResolutionInMinutes">Reduce size of included series array by adjusting the resolution.</param>
/// <returns>Time series definitions matching the query criteria.</returns>
[Obsolete]
public async Task<IReadOnlyList<IQueryResult>> GetFilteredDimensionValuesAsync(
MetricIdentifier metricId,
IEnumerable<DimensionFilter> dimensionFilters,
DateTime startTimeUtc,
DateTime endTimeUtc,
SamplingType samplingType,
Reducer reducer,
QueryFilter queryFilter,
bool includeSeries,
SelectionClause selectionClause = null,
AggregationType aggregationType = AggregationType.Sum,
long seriesResolutionInMinutes = 1)
{
SortedDictionary<string, Tuple<bool, IReadOnlyList<string>>> dimensionNamesAndConstraints;
var query = this.BuildQueryParameters(metricId, dimensionFilters, startTimeUtc, endTimeUtc, samplingType, reducer, queryFilter, includeSeries, selectionClause, aggregationType, seriesResolutionInMinutes, out dimensionNamesAndConstraints);
string path = string.Format(
"{0}/monitoringAccount/{1}/metricNamespace/{2}/metric/{3}",
this.DistributedQueryRelativeUrl,
SpecialCharsHelper.EscapeTwice(metricId.MonitoringAccount),
SpecialCharsHelper.EscapeTwice(metricId.MetricNamespace),
SpecialCharsHelper.EscapeTwice(metricId.MetricName));
var builder = new UriBuilder(this.connectionInfo.GetEndpoint(metricId.MonitoringAccount))
{
Path = path,
Query = query.ToString()
};
var response = await HttpClientHelper.GetResponse(
builder.Uri,
HttpMethod.Post,
this.httpClient,
metricId.MonitoringAccount,
this.DistributedQueryRelativeUrl,
dimensionNamesAndConstraints,
this.clientId).ConfigureAwait(false);
return JsonConvert.DeserializeObject<QueryResult[]>(response.Item1);
}
/// <summary>
/// Gets the known dimension combinations that match the query criteria.
/// </summary>
/// <param name="metricId">The metric identifier.</param>
/// <param name="dimensionFilters">The dimension filters, used to indicate one or more possible dimension values.</param>
/// <param name="startTimeUtc">Start time for time series.</param>
/// <param name="endTimeUtc">End time for time series.</param>
/// <param name="samplingType">Sampling type to use for this metric.</param>
/// <param name="reducer">The reducing function to apply to the time series.</param>
/// <param name="queryFilter">Filter criteria to enforce on the query.</param>
/// <param name="includeSeries">Indicate whether or not to include the raw time series data in the result.</param>
/// <param name="selectionClause">Reduce result to top N results of the query.</param>
/// <param name="aggregationType">Aggregation function to use when reducing the resolution of the returned series.</param>
/// <param name="seriesResolutionInMinutes">Reduce size of included series array by adjusting the resolution.</param>
/// <returns>Time series definitions matching the query criteria.</returns>
[Obsolete]
public async Task<QueryResultsList> GetFilteredDimensionValuesAsyncV2(
MetricIdentifier metricId,
IEnumerable<DimensionFilter> dimensionFilters,
DateTime startTimeUtc,
DateTime endTimeUtc,
SamplingType samplingType,
Reducer reducer,
QueryFilter queryFilter,
bool includeSeries,
SelectionClause selectionClause = null,
AggregationType aggregationType = AggregationType.Sum,
long seriesResolutionInMinutes = 1)
{
SortedDictionary<string, Tuple<bool, IReadOnlyList<string>>> dimensionNamesAndConstraints;
var query = this.BuildQueryParameters(metricId, dimensionFilters, startTimeUtc, endTimeUtc, samplingType, reducer, queryFilter, includeSeries, selectionClause, aggregationType, seriesResolutionInMinutes, out dimensionNamesAndConstraints);
string operation = $"{this.DistributedQueryRelativeUrl}/V2";
string path = string.Format(
"{0}/monitoringAccount/{1}/metricNamespace/{2}/metric/{3}",
operation,
SpecialCharsHelper.EscapeTwice(metricId.MonitoringAccount),
SpecialCharsHelper.EscapeTwice(metricId.MetricNamespace),
SpecialCharsHelper.EscapeTwice(metricId.MetricName));
var builder = new UriBuilder(this.connectionInfo.GetEndpoint(metricId.MonitoringAccount))
{
Path = path,
Query = query.ToString()
};
var response = await HttpClientHelper.GetResponse(
builder.Uri,
HttpMethod.Post,
this.httpClient,
metricId.MonitoringAccount,
operation,
dimensionNamesAndConstraints,
this.clientId).ConfigureAwait(false);
return JsonConvert.DeserializeObject<QueryResultsList>(response.Item1);
}
/// <summary>
/// Gets the time series values that match the filtering criteria.
/// </summary>
/// <param name="metricId">The metric identifier.</param>
/// <param name="dimensionFilters">The dimension filters, used to indicate one or more possible dimension values.</param>
/// <param name="startTimeUtc">Start time for time series.</param>
/// <param name="endTimeUtc">End time for time series.</param>
/// <param name="samplingTypes">The sampling types.</param>
/// <param name="selectionClause">Reduce result to top N results of the query. By default, all results are returned.</param>
/// <param name="aggregationType">Aggregation function to use when reducing the resolution of the returned series. By default, automatic resolution is used (same as Jarvis UI).</param>
/// <param name="seriesResolutionInMinutes">Reduce size of included series array by adjusting the resolution. 1 minute resolution (full resolution in MDM today) by default.</param>
/// <param name="traceId">The trace identifier for the query, used for diagnostic purposes only. If a trace id is not provided, one will be generated.</param>
/// <param name="outputDimensionNames">The output dimension names.</param>
/// <param name="lastValueMode">Indicating if the query should be fulfilled with last value mode. If true, null values in the query range requested will be filled with the last known value.</param>
/// <returns>
/// Time series definitions matching the query criteria.
/// </returns>
[Obsolete]
public async Task<IQueryResultListV3> GetFilteredDimensionValuesAsyncV3(
MetricIdentifier metricId,
IReadOnlyList<DimensionFilter> dimensionFilters,
DateTime startTimeUtc,
DateTime endTimeUtc,
IReadOnlyList<SamplingType> samplingTypes,
SelectionClauseV3 selectionClause = null,
AggregationType aggregationType = AggregationType.Automatic,
long seriesResolutionInMinutes = 1,
Guid? traceId = null,
IReadOnlyList<string> outputDimensionNames = null,
bool lastValueMode = false)
{
if (samplingTypes == null || samplingTypes.Count == 0)
{
throw new ArgumentException("One or more sampling types must be specified.");
}
if (selectionClause == null)
{
selectionClause = new SelectionClauseV3(new PropertyDefinition(PropertyAggregationType.Average, samplingTypes[0]), int.MaxValue, OrderBy.Undefined);
}
if (dimensionFilters == null)
{
throw new ArgumentNullException(nameof(dimensionFilters));
}
if (startTimeUtc > endTimeUtc)
{
throw new ArgumentException("Start time must be before end time.");
}
traceId = traceId ?? Guid.NewGuid();
var request = new FilteredTimeSeriesQueryRequest(
metricId,
samplingTypes,
dimensionFilters,
startTimeUtc,
endTimeUtc,
(int)seriesResolutionInMinutes,
aggregationType,
selectionClause.PropertyDefinition,
selectionClause.NumberOfResultsToReturn,
selectionClause.OrderBy,
false,
outputDimensionNames,
lastValueMode);
var url = string.Format(
"{0}{1}/v1/multiple/serializationVersion/{2}/maxCost/{3}?timeoutInSeconds={4}&returnRequestObjectOnFailure={5}",
this.connectionInfo.GetMetricsDataQueryEndpoint(request.MetricIdentifier.MonitoringAccount).OriginalString,
this.QueryServiceRelativeUrl,
FilteredTimeSeriesQueryResponse.CurrentVersion,
int.MaxValue,
(int)this.connectionInfo.Timeout.TotalSeconds,
false);
var response = await HttpClientHelper.GetResponse(
new Uri(url),
HttpMethod.Post,
this.httpClient,
null, // TODO add support of monitoring account on server side and then pass it here
null, // TODO add support of monitoring account on server side and pass operation here
new[] { request },
this.clientId,
null,
traceId,
FilteredTimeSeriesQueryResponse.CurrentVersion).ConfigureAwait(false);
using (HttpResponseMessage httpResponseMessage = response.Item2)
{
if (httpResponseMessage.StatusCode != HttpStatusCode.OK)
{
throw new MetricsClientException($"Request failed with HTTP Status Code: {httpResponseMessage.StatusCode}. TraceId: {traceId}. Response: {response.Item1}");
}
IReadOnlyList<IFilteredTimeSeriesQueryResponse> results;
using (var stream = await httpResponseMessage.Content.ReadAsStreamAsync().ConfigureAwait(false))
{
results = FilteredQueryResponseDeserializer.Deserialize(stream);
}
if (results == null || results.Count == 0)
{
throw new MetricsClientException($"Response is null or empty. TraceId: {traceId}");
}
IFilteredTimeSeriesQueryResponse result = results[0];
if (result.ErrorCode != FilteredTimeSeriesQueryResponseErrorCode.Success)
{
throw new MetricsClientException(
$"Error occurred while processing the request. Error code: {result.ErrorCode}. {result.DiagnosticInfo}",
null,
traceId.Value,
httpResponseMessage.StatusCode);
}
return new QueryResultListV3(result.StartTimeUtc, result.EndTimeUtc, result.TimeResolutionInMinutes, (IReadOnlyList<FilteredTimeSeries>)result.FilteredTimeSeriesList);
}
}
/// <summary>
/// Gets the time series values that match the filtering criteria.
/// This overload streams the response so that values can be processed on the fly instead of being
/// buffered as a list of results in memory.
/// Note: This API is for advanced scenarios only. Use it only when you fetch very large amounts of metrics
/// from multiple stamps in parallel and run into memory-related performance problems.
/// </summary>
/// <param name="metricId">The metric identifier.</param>
/// <param name="dimensionFilters">The dimension filters, used to indicate one or more possible dimension values.</param>
/// <param name="startTimeUtc">Start time for time series.</param>
/// <param name="endTimeUtc">End time for time series.</param>
/// <param name="samplingTypes">The sampling types.</param>
/// <param name="selectionClause">Reduce result to top N results of the query. By default, all results are returned.</param>
/// <param name="aggregationType">Aggregation function to use when reducing the resolution of the returned series. By default, automatic resolution is used (same as Jarvis UI).</param>
/// <param name="seriesResolutionInMinutes">Reduce size of included series array by adjusting the resolution. 1 minute resolution (full resolution in MDM today) by default.</param>
/// <param name="traceId">The trace identifier for the query, used for diagnostic purposes only. If a trace id is not provided, one will be generated.</param>
/// <param name="outputDimensionNames">The output dimension names.</param>
/// <returns>
/// The response message.
/// </returns>
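/// <example>
/// A minimal usage sketch (hypothetical; assumes <c>client</c> is an instance of this class and that
/// <c>metricId</c>, <c>filters</c>, and <c>samplingTypes</c> have been constructed elsewhere):
/// <code>
/// using (HttpResponseMessage response = await client.GetTimeSeriesStreamedAsync(
///     metricId, filters, DateTime.UtcNow.AddHours(-1), DateTime.UtcNow, samplingTypes))
/// using (var stream = await response.Content.ReadAsStreamAsync())
/// {
///     // Process the serialized query response on the fly instead of buffering it in memory.
/// }
/// </code>
/// </example>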
public async Task<HttpResponseMessage> GetTimeSeriesStreamedAsync(
MetricIdentifier metricId,
IReadOnlyList<DimensionFilter> dimensionFilters,
DateTime startTimeUtc,
DateTime endTimeUtc,
IReadOnlyList<SamplingType> samplingTypes,
SelectionClauseV3 selectionClause = null,
AggregationType aggregationType = AggregationType.Automatic,
long seriesResolutionInMinutes = 1,
Guid? traceId = null,
IReadOnlyList<string> outputDimensionNames = null)
{
if (samplingTypes == null || samplingTypes.Count == 0)
{
throw new ArgumentException("One or more sampling types must be specified.");
}
if (selectionClause == null)
{
selectionClause = new SelectionClauseV3(new PropertyDefinition(PropertyAggregationType.Average, samplingTypes[0]), int.MaxValue, OrderBy.Undefined);
}
if (dimensionFilters == null)
{
throw new ArgumentNullException(nameof(dimensionFilters));
}
if (startTimeUtc > endTimeUtc)
{
throw new ArgumentException("Start time must be before end time.");
}
traceId = traceId ?? Guid.NewGuid();
var request = new FilteredTimeSeriesQueryRequest(
metricId,
samplingTypes,
dimensionFilters,
startTimeUtc,
endTimeUtc,
(int)seriesResolutionInMinutes,
aggregationType,
selectionClause.PropertyDefinition,
selectionClause.NumberOfResultsToReturn,
selectionClause.OrderBy,
false,
outputDimensionNames);
var url = string.Format(
"{0}{1}/v1/multiple/serializationVersion/{2}/maxCost/{3}?timeoutInSeconds={4}&returnRequestObjectOnFailure={5}",
this.connectionInfo.GetMetricsDataQueryEndpoint(request.MetricIdentifier.MonitoringAccount).OriginalString,
this.QueryServiceRelativeUrl,
FilteredTimeSeriesQueryResponse.CurrentVersion,
int.MaxValue,
(int)this.connectionInfo.Timeout.TotalSeconds,
false);
Tuple<string, HttpResponseMessage> response = await HttpClientHelper.GetResponse(
new Uri(url),
HttpMethod.Post,
this.httpClient,
null, // TODO add support of monitoring account on server side and then pass it here
null, // TODO add support of monitoring account on server side and pass operation here
new[] { request },
this.clientId,
null,
traceId,
FilteredTimeSeriesQueryResponse.CurrentVersion).ConfigureAwait(false);
HttpResponseMessage httpResponseMessage = response.Item2;
if (httpResponseMessage.StatusCode != HttpStatusCode.OK)
{
throw new MetricsClientException($"Request failed with HTTP Status Code: {httpResponseMessage.StatusCode}. TraceId: {traceId}. Response: {response.Item1}");
}
return httpResponseMessage;
}
/// <summary>
/// Executes the given KQL-M query and returns the results as a data table.
/// The data table is represented by a JArray where each element represents a row in the query result.
///
/// For example:
/// Input Query for last 1 hour at 1 minute resolution:
/// metricNamespace("Metrics.Server").metric("ClientAggregatedMetricCount").dimensions("Datacenter") | project Average
/// Output (will contain one row for each datapoint):
/// [
/// {
/// "TimestampUtc": "02/04/2019 07:11:00",
/// "AccountName": "MetricTeamInternalMetrics",
/// "MetricNamespace": "Metrics.Server",
/// "MetricName": "ClientAggregatedMetricCount",
/// "Datacenter": "EastUS2",
/// "Average": 70.083851254134714
/// },
/// {
/// "TimestampUtc": "02/04/2019 07:12:00",
/// "AccountName": "MetricTeamInternalMetrics",
/// "MetricNamespace": "Metrics.Server",
/// "MetricName": "ClientAggregatedMetricCount",
/// "Datacenter": "EastUS2",
/// "Average": 67.305346411549351
/// }
/// ]
/// </summary>
/// <param name="traceId">Trace id for end to end tracing purposes.</param>
/// <param name="accountName">Account name for which query needs to be run.</param>
/// <param name="queryString">KQL-M query.</param>
/// <param name="startTimeUtc">Start time utc for the query.</param>
/// <param name="endTimeUtc">End time utc for the query.</param>
/// <returns>Result for query.</returns>
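/// <example>
/// A minimal usage sketch (hypothetical; <c>client</c> is assumed to be an instance of this class, and the
/// account and query values mirror the sample in the summary above):
/// <code>
/// JArray rows = await client.ExecuteKqlMQueryAsync(
///     Guid.NewGuid().ToString(),
///     "MetricTeamInternalMetrics",
///     "metricNamespace(\"Metrics.Server\").metric(\"ClientAggregatedMetricCount\").dimensions(\"Datacenter\") | project Average",
///     DateTime.UtcNow.AddHours(-1),
///     DateTime.UtcNow);
/// foreach (var row in rows)
/// {
///     Console.WriteLine(row["TimestampUtc"] + " " + row["Average"]);
/// }
/// </code>
/// </example>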
public async Task<JArray> ExecuteKqlMQueryAsync(
string traceId,
string accountName,
string queryString,
DateTime startTimeUtc,
DateTime endTimeUtc)
{
var queryLanguageRequest =
new KqlMRequest(accountName, "[N/A]", "[N/A]", startTimeUtc, endTimeUtc, queryString);
var url = string.Format(
"{0}{1}query/v2/language/monitoringAccount/{2}",
this.connectionInfo.GetEndpoint(accountName).OriginalString,
this.connectionInfo.GetAuthRelativeUrl(string.Empty),
accountName);
Guid traceIdGuid;
Guid? traceIdNullable = null;
if (Guid.TryParse(traceId, out traceIdGuid))
{
traceIdNullable = traceIdGuid;
}
var response = await HttpClientHelper.GetResponse(
url: new Uri(url),
method: HttpMethod.Post,
client: this.httpClient,
monitoringAccount: accountName,
operation: null,
httpContent: queryLanguageRequest,
clientId: this.clientId,
serializedContent: null,
traceId: traceIdNullable,
serializationVersion: FilteredTimeSeriesQueryResponse.CurrentVersion).ConfigureAwait(false);
using (HttpResponseMessage httpResponseMessage = response.Item2)
{
if (httpResponseMessage.StatusCode != HttpStatusCode.OK)
{
throw new MetricsClientException($"Request failed with HTTP Status Code: {httpResponseMessage.StatusCode}. TraceId: {traceId}. Response: {response.Item1}");
}
using (var stream = await httpResponseMessage.Content.ReadAsStreamAsync().ConfigureAwait(false))
{
return QueryLanguageResponseToDatatable.GetResponseAsTable(stream);
}
}
}
/// <summary>
/// Gets the time series values that match the filtering criteria.
/// </summary>
/// <param name="metricId">The metric identifier.</param>
/// <param name="dimensionFilters">The dimension filters, used to indicate one or more possible dimension values.</param>
/// <param name="startTimeUtc">Start time for time series.</param>
/// <param name="endTimeUtc">End time for time series.</param>
/// <param name="samplingTypes">The sampling types.</param>
/// <param name="selectionClause">Reduce result to top N results of the query. By default, all results are returned.</param>
/// <param name="aggregationType">Aggregation function to use when reducing the resolution of the returned series. By default, automatic resolution is used (same as Jarvis UI).</param>
/// <param name="seriesResolutionInMinutes">Reduce size of included series array by adjusting the resolution. 1 minute resolution (full resolution in MDM today) by default.</param>
/// <param name="traceId">The trace identifier for the query, used for diagnostic purposes only. If a trace id is not provided, one will be generated.</param>
/// <param name="outputDimensionNames">The output dimension names.</param>
/// <param name="lastValueMode">Indicating if the query should be fulfilled with last value mode. If true, null values in the query range requested will be filled with the last known value.</param>
/// <returns>
/// Time series results matching the query criteria.
/// </returns>
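/// <example>
/// A minimal usage sketch (hypothetical; assumes <c>client</c> is an instance of this class and that
/// <c>metricId</c>, <c>filters</c>, and <c>samplingTypes</c> have been constructed elsewhere):
/// <code>
/// IQueryResultListV3 results = await client.GetTimeSeriesAsync(
///     metricId,
///     filters,
///     DateTime.UtcNow.AddHours(-1),
///     DateTime.UtcNow,
///     samplingTypes);
/// </code>
/// </example>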
public Task<IQueryResultListV3> GetTimeSeriesAsync(
MetricIdentifier metricId,
IReadOnlyList<DimensionFilter> dimensionFilters,
DateTime startTimeUtc,
DateTime endTimeUtc,
IReadOnlyList<SamplingType> samplingTypes,
SelectionClauseV3 selectionClause = null,
AggregationType aggregationType = AggregationType.Automatic,
long seriesResolutionInMinutes = 1,
Guid? traceId = null,
IReadOnlyList<string> outputDimensionNames = null,
bool lastValueMode = false)
{
return this.GetFilteredDimensionValuesAsyncV3(
metricId,
dimensionFilters,
startTimeUtc,
endTimeUtc,
samplingTypes,
selectionClause,
aggregationType,
seriesResolutionInMinutes,
traceId,
outputDimensionNames,
lastValueMode);
}
/// <summary>
/// Returns all metric definitions for time series satisfying given set of filters in the given monitoring account.
/// </summary>
/// <remarks>
/// QOS metrics, composite metrics and wild card metrics are not included in the result set.
/// </remarks>
/// <param name="monitoringAccount">Monitoring account name.</param>
/// <param name="dimensionFilters">The dimension filters, used to indicate one or more possible dimension values.</param>
/// <param name="startTimeUtc">Start time for time series.</param>
/// <param name="endTimeUtc">End time for time series.</param>
/// <param name="traceId">The trace identifier for the query, used for diagnostic purposes only. If a trace id is not provided, one will be generated.</param>
/// <returns>
/// All metric definitions which have data for time series keys satisfying the given set of filters.
/// </returns>
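/// <example>
/// A minimal usage sketch (hypothetical; assumes <c>client</c> is an instance of this class and that
/// <c>filters</c> contains at least one dimension filter with dimension values):
/// <code>
/// var definitions = await client.GetMetricDefinitionsAsync(
///     "MetricTeamInternalMetrics",
///     filters,
///     DateTime.UtcNow.AddHours(-1),
///     DateTime.UtcNow);
/// </code>
/// </example>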
public async Task<IReadOnlyList<MetricDefinitionV2>> GetMetricDefinitionsAsync(
string monitoringAccount,
IReadOnlyList<DimensionFilter> dimensionFilters,
DateTime startTimeUtc,
DateTime endTimeUtc,
Guid? traceId = null)
{
if (string.IsNullOrEmpty(monitoringAccount))
{
throw new ArgumentNullException(nameof(monitoringAccount), "Monitoring account cannot be null or empty");
}
if (dimensionFilters == null || dimensionFilters.Count == 0)
{
throw new ArgumentNullException(nameof(dimensionFilters), "Dimension filters cannot be null or empty");
}
if (startTimeUtc > endTimeUtc)
{
throw new ArgumentException(string.Format("Start time cannot be greater than end time. StartTime:{0}, EndTime:{1}", startTimeUtc, endTimeUtc));
}
bool hasAtLeastOneDimensionValueFilter = false;
foreach (var dimensionFilter in dimensionFilters)
{
if (dimensionFilter.DimensionValues.Count > 0)
{
hasAtLeastOneDimensionValueFilter = true;
break;
}
}
if (!hasAtLeastOneDimensionValueFilter)
{
throw new ArgumentException("Dimension filters must include at least one dimension with dimension value filters.");
}
var dimensionNamesAndConstraints = GetDimensionNamesAndConstraints(dimensionFilters);
var url = string.Format(
"{0}{1}/metricDefinitions/monitoringAccount/{2}/startTimeUtcMillis/{3}/endTimeUtcMillis/{4}",
this.connectionInfo.GetEndpoint(monitoringAccount),
this.MetaDataRelativeUrl,
SpecialCharsHelper.EscapeTwice(monitoringAccount),
UnixEpochHelper.GetMillis(startTimeUtc),
UnixEpochHelper.GetMillis(endTimeUtc));
var response = await HttpClientHelper.GetResponse(
new Uri(url),
HttpMethod.Post,
this.httpClient,
monitoringAccount,
this.MetaDataRelativeUrl,
dimensionNamesAndConstraints,
this.clientId,
traceId: traceId).ConfigureAwait(false);
var metricDefinitionsAsByteArray = await response.Item2.Content.ReadAsByteArrayAsync().ConfigureAwait(false);
using (var ms = new MemoryStream(metricDefinitionsAsByteArray))
{
using (var reader = new BinaryReader(ms, Encoding.UTF8))
{
reader.ReadByte();
List<string> tempList = new List<string>();
int numberOfMetadata = reader.ReadInt32();
var result = new List<MetricDefinitionV2>(numberOfMetadata);
for (int i = 0; i < numberOfMetadata; i++)
{
var monitoringAcct = reader.ReadString();
var metricNamespace = reader.ReadString();
var metricName = reader.ReadString();
var numOfDimensions = reader.ReadInt32();
for (int j = 0; j < numOfDimensions; j++)
{
tempList.Add(reader.ReadString());
}
var metadata = new MetricDefinitionV2(monitoringAcct, metricNamespace, metricName, tempList);
result.Add(metadata);
tempList.Clear();
}
return result;
}
}
}
/// <summary>
/// Gets a list of the time series.
/// </summary>
/// <param name="definitions">The time series definitions.</param>
/// <param name="serializationVersion">The serialization version.</param>
/// <param name="returnMetricNames">if set to <c>true</c>, return metric names in response.</param>
/// <param name="traceId">The trace identifier.</param>
/// <returns>
/// The time series for the given <paramref name="definitions" />.
/// </returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="definitions" /> is null.</exception>
/// <exception cref="ArgumentException">
/// The count of 'definitions' is 0.
/// or
/// At least one of definitions is null.
/// </exception>
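/// <example>
/// A minimal usage sketch (hypothetical; assumes <c>client</c> is an instance of this class and that
/// <c>definitions</c> is a collection of time series definitions constructed elsewhere):
/// <code>
/// using (HttpResponseMessage response = await client.GetMultipleTimeSeriesAsync(definitions))
/// {
///     byte[] payload = await response.Content.ReadAsByteArrayAsync();
///     // Deserialize 'payload' using the matching serialization version.
/// }
/// </code>
/// </example>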
public async Task<HttpResponseMessage> GetMultipleTimeSeriesAsync(
IEnumerable<TimeSeriesDefinition<MetricIdentifier>> definitions,
byte serializationVersion = MetricQueryResponseDeserializer.CurrentVersion,
bool returnMetricNames = false,
Guid? traceId = null)
{
if (definitions == null)
{
throw new ArgumentNullException(nameof(definitions));
}
var definitionArray = definitions.ToArray();
if (definitionArray.Length == 0)
{
throw new ArgumentException("The count of 'definitions' is 0.");
}
if (definitionArray.Any(d => d == null))
{
throw new ArgumentException("At least one of definitions is null.");
}
string operation = $"{this.DataRelativeUrl}/binary/version/{serializationVersion}";
string monitoringAccount = definitionArray[0].Id.MonitoringAccount;
string url = string.Format(
"{0}{1}/monitoringAccount/{2}/returnMetricNames/{3}",
this.connectionInfo.GetMetricsDataQueryEndpoint(monitoringAccount).OriginalString,
operation,
monitoringAccount,
returnMetricNames);
var response = await HttpClientHelper.GetResponse(
new Uri(url),
HttpMethod.Post,
this.httpClient,
monitoringAccount,
operation,
definitionArray,
this.clientId,
null,
traceId,
serializationVersion).ConfigureAwait(false);
return response.Item2;
}
/// <summary>
/// Gets time series filtered by dimension criteria and a Top N series condition.
/// </summary>
/// <param name="filteredQueryRequests">The filtered query requests.</param>
/// <param name="serializationVersion">The serialization version.</param>
/// <param name="maximumAllowedQueryCost">The maximum allowed query cost.</param>
/// <param name="traceId">The trace identifier.</param>
/// <param name="returnRequestObjectOnFailure">if set to <c>true</c>, the request object is returned on failure.</param>
/// <returns>
/// The raw stream of query results.
/// </returns>
[Obsolete]
public async Task<HttpResponseMessage> GetFilteredTimeSeriesAsync(
IReadOnlyList<FilteredTimeSeriesQueryRequest> filteredQueryRequests,
byte serializationVersion,
long maximumAllowedQueryCost,
Guid traceId,
bool returnRequestObjectOnFailure)
{
if (filteredQueryRequests == null)
{
throw new ArgumentNullException(nameof(filteredQueryRequests));
}
if (filteredQueryRequests.Count == 0)
{
throw new ArgumentException("The count of 'filteredQueryRequests' is 0.");
}
if (filteredQueryRequests.Any(d => d == null))
{
throw new ArgumentException("At least one of filteredQueryRequests is null.");
}
var url = string.Format(
"{0}{1}/v1/multiple/serializationVersion/{2}/maxCost/{3}?timeoutInSeconds={4}&returnRequestObjectOnFailure={5}",
this.connectionInfo.GetMetricsDataQueryEndpoint(filteredQueryRequests[0].MetricIdentifier.MonitoringAccount).OriginalString,
this.QueryServiceRelativeUrl,
serializationVersion,
maximumAllowedQueryCost,
(int)this.connectionInfo.Timeout.TotalSeconds,
returnRequestObjectOnFailure);
var response = await HttpClientHelper.GetResponse(
new Uri(url),
HttpMethod.Post,
this.httpClient,
null, // TODO add support of monitoring account on server side and then pass it here
null, // TODO add support of monitoring account on server side and pass operation here
filteredQueryRequests,
this.clientId,
null,
traceId,
serializationVersion).ConfigureAwait(false);
return response.Item2;
}
/// <summary>
/// Normalizes the time range.
/// </summary>
/// <param name="startTimeUtc">The start time in UTC.</param>
/// <param name="endTimeUtc">The end time in UTC.</param>
private static void NormalizeTimeRange(ref DateTime startTimeUtc, ref DateTime endTimeUtc)
{
startTimeUtc = new DateTime(startTimeUtc.Ticks / TimeSpan.TicksPerMinute * TimeSpan.TicksPerMinute);
endTimeUtc = new DateTime(endTimeUtc.Ticks / TimeSpan.TicksPerMinute * TimeSpan.TicksPerMinute);
}
/// <summary>
/// Get dimensions names and constraints from dimension filters.
/// </summary>
/// <param name="dimensionFilters">The dimension filters.</param>
/// <returns>The dimension names and constraints.</returns>
/// <exception cref="MetricsClientException">Only one filter can be specified for a dimension.</exception>
private static SortedDictionary<string, Tuple<bool, IReadOnlyList<string>>> GetDimensionNamesAndConstraints(IEnumerable<DimensionFilter> dimensionFilters)
{
var dimensionNamesAndConstraints =
new SortedDictionary<string, Tuple<bool, IReadOnlyList<string>>>(StringComparer.OrdinalIgnoreCase);
if (dimensionFilters != null)
{
foreach (var filter in dimensionFilters)
{
Tuple<bool, IReadOnlyList<string>> tuple;
if (filter.DimensionValues == null)
{
tuple = Tuple.Create(true, (IReadOnlyList<string>)EmptyStringArray);
}
else
{
tuple = Tuple.Create(filter.IsExcludeFilter, filter.DimensionValues);
}
if (dimensionNamesAndConstraints.ContainsKey(filter.DimensionName))
{
throw new MetricsClientException(
"Only one filter can be specified for a dimension. Another filter already exists for dimension: " +
filter.DimensionName);
}
dimensionNamesAndConstraints.Add(filter.DimensionName, tuple);
}
}
return dimensionNamesAndConstraints;
}
/// <summary>
/// Builds the dimension names and constraints for DQ query.
/// </summary>
/// <param name="metricId">The metric identifier.</param>
/// <param name="dimensionFilters">The dimension filters, used to indicate one or more possible dimension values.</param>
/// <param name="startTimeUtc">Start time for time series.</param>
/// <param name="endTimeUtc">End time for time series.</param>
/// <param name="samplingType">Sampling type to use for this metric.</param>
/// <param name="reducer">The reducing function to apply to the time series.</param>
/// <param name="queryFilter">Filter criteria to enforce on the query.</param>
/// <param name="includeSeries">Indicate whether or not to include the raw time series data in the result.</param>
/// <param name="selectionClause">Reduce result to top N results of the query.</param>
/// <param name="aggregationType">Aggregation function to use when reducing the resolution of the returned series.</param>
/// <param name="seriesResolutionInMinutes">Reduce size of included series array by adjusting the resolution.</param>
/// <param name="dimensionNamesAndConstraints">When this method returns, contains the dimension names and value constraints derived from <paramref name="dimensionFilters"/>.</param>
/// <returns>The query string parameters for the distributed query request.</returns>
private NameValueCollection BuildQueryParameters(
MetricIdentifier metricId,
IEnumerable<DimensionFilter> dimensionFilters,
DateTime startTimeUtc,
DateTime endTimeUtc,
SamplingType samplingType,
Reducer reducer,
QueryFilter queryFilter,
bool includeSeries,
SelectionClause selectionClause,
AggregationType aggregationType,
long seriesResolutionInMinutes,
out SortedDictionary<string, Tuple<bool, IReadOnlyList<string>>> dimensionNamesAndConstraints)
{
// There is an internal collection called HttpValueCollection for which the ToString() value
// correctly generates a query string. We can access this collection type by calling parse
// on an empty query string.
var query = HttpUtility.ParseQueryString(string.Empty);
var startTimeMillis = UnixEpochHelper.GetMillis(startTimeUtc);
var endTimeMillis = UnixEpochHelper.GetMillis(endTimeUtc);
if (startTimeMillis > endTimeMillis)
{
throw new ArgumentException("Start time must be before end time.");
}
if (queryFilter == null)
{
throw new ArgumentNullException(nameof(queryFilter));
}
if (reducer == Reducer.Undefined)
{
throw new ArgumentException("Reducer cannot be undefined. Use QueryFilter.NoFilter to get all time series.");
}
if (!object.ReferenceEquals(queryFilter, QueryFilter.NoFilter))
{
if (queryFilter.Operator == Operator.Undefined)
{
throw new ArgumentException("Operator cannot be undefined. Use QueryFilter.NoFilter to get all time series.");
}
}
query["SamplingType"] = samplingType.ToString();
query["startTime"] = startTimeMillis.ToString();
query["endTime"] = endTimeMillis.ToString();
query["includeSeries"] = includeSeries.ToString();
query["reducer"] = reducer.ToString();
if (includeSeries)
{
query["seriesAggregationType"] = aggregationType.ToString();
query["seriesResolution"] = (seriesResolutionInMinutes * MillisecondsPerMinute).ToString();
}
if (object.ReferenceEquals(queryFilter, QueryFilter.NoFilter))
{
query["noFilter"] = "true";
}
else
{
query["operator"] = queryFilter.Operator.ToString();
query["operand"] = queryFilter.Operand.ToString(CultureInfo.InvariantCulture);
}
if (selectionClause != null && !object.ReferenceEquals(selectionClause, SelectionClause.AllResults))
{
query["selectionType"] = selectionClause.SelectionType.ToString();
query["top"] = selectionClause.QuantityToSelect.ToString();
query["orderBy"] = selectionClause.OrderBy.ToString();
}
dimensionNamesAndConstraints = GetDimensionNamesAndConstraints(dimensionFilters);
return query;
}
}
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="IReadOnlyHyperLogLogSketches.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Online.Metrics.Serialization
{
using System.Collections.Generic;
/// <summary>
/// Read-only interface for objects representing list of hyperloglog sketches.
/// </summary>
public interface IReadOnlyHyperLogLogSketches
{
/// <summary>
/// Gets the total sketches count.
/// </summary>
uint HyperLogLogSketchesCount { get; }
/// <summary>
/// Gets the list of sketches: ordered pairs of distinct count dimension name and HyperLogLogSketch.
/// </summary>
IEnumerable<KeyValuePair<string, HyperLogLogSketch>> HyperLogLogSketches
{
get;
}
}
}<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="IQueryResultV3.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Query
{
using System.Collections.Generic;
using Microsoft.Cloud.Metrics.Client.Metrics;
/// <summary>
/// Represents a single time series result, with one or more sampling types.
/// </summary>
public interface IQueryResultV3
{
/// <summary>
/// Set of valid dimension name-value pairs that meet the query condition.
/// </summary>
IReadOnlyList<KeyValuePair<string, string>> DimensionList { get; }
/// <summary>
/// Gets the evaluated value for this time series that meets the condition set in the query (provided for evidence and/or sorting).
/// </summary>
double EvaluatedResult { get; }
/// <summary>
/// Gets the time series values for the requested sampling type.
/// </summary>
/// <param name="samplingType">The sampling type requested.</param>
/// <returns>The array of datapoints for the requested sampling type.</returns>
/// <remarks>
/// double.NaN is the sentinel used to indicate there is no metric value.
/// </remarks>
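/// <example>
/// A minimal sketch of consuming the returned values (hypothetical; <c>result</c> is an
/// <see cref="IQueryResultV3"/> and <c>samplingType</c> is a sampling type requested in the query):
/// <code>
/// double[] values = result.GetTimeSeriesValues(samplingType);
/// foreach (double value in values)
/// {
///     if (!double.IsNaN(value))
///     {
///         // Only non-sentinel datapoints carry a metric value.
///     }
/// }
/// </code>
/// </example>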
double[] GetTimeSeriesValues(SamplingType samplingType);
}
}
<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="RollupConfiguration.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Configuration
{
using Newtonsoft.Json;
/// <summary>
/// Determines whether rollup is enabled for this preaggregate.
/// </summary>
public sealed class RollupConfiguration : IRollupConfiguration
{
/// <summary>
/// RollupConfiguration where rollup is enabled.
/// </summary>
public static readonly RollupConfiguration RollupEnabled = new RollupConfiguration(true);
/// <summary>
/// RollupConfiguration where rollup is disabled.
/// </summary>
public static readonly RollupConfiguration RollupDisabled = new RollupConfiguration(false);
/// <summary>
/// Initializes a new instance of the <see cref="RollupConfiguration"/> class.
/// </summary>
/// <param name="enabled">Whether or not the feature is enabled.</param>
[JsonConstructor]
internal RollupConfiguration(bool enabled)
{
this.Enabled = enabled;
}
/// <summary>
/// Determines if the rollup is enabled or disabled.
/// </summary>
public bool Enabled { get; }
}
}
<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="IMetricConfiguration.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Configuration
{
using System;
/// <summary>
/// Interface representing an MDM Metric.
/// </summary>
public interface IMetricConfiguration
{
/// <summary>
/// The namespace of the metric.
/// </summary>
string MetricNamespace { get; }
/// <summary>
/// The name of the metric.
/// </summary>
string Name { get; }
/// <summary>
/// The last updated time of the metric.
/// </summary>
DateTime LastUpdatedTime { get; }
/// <summary>
/// The last entity to update the metric.
/// </summary>
string LastUpdatedBy { get; }
/// <summary>
/// The version of the metric.
/// </summary>
uint Version { get; }
/// <summary>
/// Gets the description of the metric.
/// </summary>
string Description { get; }
}
}
<file_sep>// ------------------------------------------------------------------------------------------
// <copyright file="ClockType.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// -------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Metrics.Etw
{
// ReSharper disable UnusedMember.Global
/// <summary>
/// Types of clock that can be selected for the ETW session.
/// </summary>
/// <see href="http://msdn.microsoft.com/en-us/library/windows/desktop/aa364160(v=vs.85).aspx"/>
internal enum ClockType
{
/// <summary>
/// The default clock type to be used by the session; it is equivalent to selecting the PerformanceCounter value.
/// </summary>
Default = 0,
/// <summary>
/// Indicates that the session uses the OS performance counter, a.k.a.: QPC. The resolution is typically 1000 times
/// less than the CPU frequency of the box. It is the recommended way to collect high-resolution timestamps in Windows.
/// </summary>
Perf = 1,
/// <summary>
/// Indicates that the session uses the SystemTime clock (with an actual resolution of ~15 milliseconds it is
/// a cheaper timestamp to collect for ETW; the downside is the lack of resolution).
/// </summary>
System = 2,
/// <summary>
/// Indicates that the session uses the CPU timestamp (RDTSC instruction to retrieve the TSC). It is the cheapest of
/// all and has the highest resolution, but it is not guaranteed to be in sync between different processors in the box.
/// </summary>
Cycle = 3,
}
}<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="SelectionClause.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Query
{
/// <summary>
/// Ordering enumeration for the Top N selection clause; the default is descending.
/// </summary>
public enum OrderBy
{
/// <summary>
/// Ordering not specified. Selection clause is not considered valid.
/// </summary>
Undefined,
/// <summary>
/// Descending ordering for Top N selection clause, this is the default value.
/// </summary>
Descending,
/// <summary>
/// Ascending ordering for Top N selection clause.
/// </summary>
Ascending,
}
/// <summary>
/// The type of selection to perform.
/// </summary>
public enum SelectionType
{
/// <summary>
/// Selection type not specified. Selection clause is not considered valid.
/// </summary>
Undefined,
/// <summary>
/// Top N should return the top N values that meet the filter criteria
/// </summary>
TopValues,
/// <summary>
/// Top N should return the top N percent of values that meet the filter criteria
/// </summary>
TopPercent,
}
/// <summary>
/// This class represents the selection clause of the query
/// </summary>
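/// <example>
/// A minimal sketch of building a Top N clause (hypothetical values):
/// <code>
/// // Select the top 5 series, ordered descending by the evaluated value.
/// var topFive = new SelectionClause(SelectionType.TopValues, 5, OrderBy.Descending);
///
/// // Or request all results explicitly.
/// var everything = SelectionClause.AllResults;
/// </code>
/// </example>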
public sealed class SelectionClause
{
/// <summary>
/// Selection clause to indicate that all results should be returned.
/// </summary>
public static readonly SelectionClause AllResults = new SelectionClause(SelectionType.Undefined, 0, OrderBy.Undefined);
/// <summary>
/// Initializes a new instance of the <see cref="SelectionClause"/> class.
/// </summary>
/// <param name="selectionType">Type of the selection clause.</param>
/// <param name="quantityToSelect">The quantity to select.</param>
/// <param name="orderBy">The ordering of the selection.</param>
public SelectionClause(SelectionType selectionType, int quantityToSelect, OrderBy orderBy)
{
this.SelectionType = selectionType;
this.QuantityToSelect = quantityToSelect;
this.OrderBy = orderBy;
}
/// <summary>
/// Gets the type of the selection clause.
/// </summary>
public SelectionType SelectionType { get; private set; }
/// <summary>
/// Gets the quantity to select.
/// </summary>
public int QuantityToSelect { get; private set; }
/// <summary>
/// Gets the ordering of the selection
/// </summary>
public OrderBy OrderBy { get; private set; }
/// <summary>
/// Determines whether the specified <see cref="object" />, is equal to this instance.
/// </summary>
/// <param name="obj">The <see cref="object" /> to compare with this instance.</param>
/// <returns>
/// <c>true</c> if the specified <see cref="object" /> is equal to this instance; otherwise, <c>false</c>.
/// </returns>
public override bool Equals(object obj)
{
return this.Equals(obj as SelectionClause);
}
/// <summary>
/// Get hash code for this object.
/// </summary>
/// <returns>The hash code.</returns>
public override int GetHashCode()
{
return this.SelectionType.GetHashCode() ^ this.QuantityToSelect.GetHashCode() ^ this.OrderBy.GetHashCode();
}
/// <summary>
/// Compare this Selection Clause to the specified other clause.
/// </summary>
/// <param name="otherClause">The other clause.</param>
/// <returns>Result of the equality test.</returns>
private bool Equals(SelectionClause otherClause)
{
if (otherClause == null)
{
return false;
}
return this.SelectionType.Equals(otherClause.SelectionType)
&& this.QuantityToSelect.Equals(otherClause.QuantityToSelect)
&& this.OrderBy == otherClause.OrderBy;
}
}
}
<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="RawMetricConfiguration.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Configuration
{
using System;
using System.Collections.Generic;
using System.Linq;
using Metrics;
using Newtonsoft.Json;
using Online.Metrics.Serialization;
/// <summary>
/// Represents a raw metric in MDM.
/// </summary>
public sealed class RawMetricConfiguration : IRawMetricConfiguration
{
private readonly List<IPreaggregation> preaggregations;
private readonly List<IComputedSamplingTypeExpression> computedSamplingTypes;
private string description;
/// <summary>
/// Initializes a new instance of the <see cref="RawMetricConfiguration"/> class.
/// </summary>
/// <param name="metricNamespace">The metric namespace.</param>
/// <param name="name">The name.</param>
/// <param name="lastUpdatedTime">The last updated time.</param>
/// <param name="lastUpdatedBy">The last updated by.</param>
/// <param name="version">The version.</param>
/// <param name="scalingFactor">The scaling factor.</param>
/// <param name="enableClientPublication">if set to <c>true</c> to enable client publication.</param>
/// <param name="enableClientForking">if set to <c>true</c> to enable client forking.</param>
/// <param name="description">The description of the metric.</param>
/// <param name="dimensions">The dimensions.</param>
/// <param name="preaggregations">The preaggregations.</param>
/// <param name="rawSamplingTypes">The raw sampling types.</param>
/// <param name="computedSamplingTypes">The computed sampling types.</param>
/// <param name="useClientSideLastSamplingMode">Whether or not to only use the last value written within the sample period.</param>
/// <param name="useClientSideEtwPublication">Whether or not the metric should be published to the aggregated ETW provider.</param>
[JsonConstructor]
internal RawMetricConfiguration(
string metricNamespace,
string name,
DateTime lastUpdatedTime,
string lastUpdatedBy,
uint version,
float? scalingFactor,
bool enableClientPublication,
bool enableClientForking,
string description,
IEnumerable<string> dimensions,
IEnumerable<IPreaggregation> preaggregations,
IEnumerable<SamplingType> rawSamplingTypes,
IEnumerable<IComputedSamplingTypeExpression> computedSamplingTypes,
bool useClientSideLastSamplingMode,
bool useClientSideEtwPublication)
{
this.MetricNamespace = metricNamespace;
this.Name = name;
this.LastUpdatedTime = lastUpdatedTime;
this.LastUpdatedBy = lastUpdatedBy;
this.Version = version;
this.ScalingFactor = scalingFactor;
this.EnableClientPublication = enableClientPublication;
this.EnableClientForking = enableClientForking;
this.Description = description;
this.Dimensions = dimensions;
this.preaggregations = preaggregations.ToList();
this.RawSamplingTypes = rawSamplingTypes;
this.computedSamplingTypes = computedSamplingTypes.ToList();
this.EnableClientSideLastSamplingMode = useClientSideLastSamplingMode;
this.EnableClientEtwPublication = useClientSideEtwPublication;
}
/// <summary>
/// The namespace of the metric.
/// </summary>
public string MetricNamespace { get; }
/// <summary>
/// The name of the metric.
/// </summary>
public string Name { get; }
/// <summary>
/// The last updated time of the metric.
/// </summary>
public DateTime LastUpdatedTime { get; }
/// <summary>
/// The last entity to update the metric.
/// </summary>
public string LastUpdatedBy { get; }
/// <summary>
/// The version of the metric.
/// </summary>
public uint Version { get; }
/// <summary>
/// Gets or sets the scaling factor.
/// </summary>
public float? ScalingFactor { get; set; }
/// <summary>
/// Gets or sets a value indicating whether client publication is enabled.
/// </summary>
public bool EnableClientPublication { get; set; }
/// <summary>
/// Gets or sets a value indicating whether client forking is enabled.
/// </summary>
public bool EnableClientForking { get; set; }
/// <summary>
/// Gets the description of the metric.
/// </summary>
public string Description
{
get
{
return this.description;
}
set
{
if (value != null && value.Length > SerializationConstants.MaximumMetricDescriptionLength)
{
throw new ArgumentOutOfRangeException(
nameof(value),
$"The metric description cannot be greater than {SerializationConstants.MaximumMetricDescriptionLength} characters.");
}
this.description = value;
}
}
/// <summary>
/// Gets the raw sampling types (Sum, Count, or legacy MetricsClient sampling types).
/// </summary>
public IEnumerable<SamplingType> RawSamplingTypes { get; }
/// <summary>
/// Gets the preaggregations of the metric.
/// </summary>
public IEnumerable<IPreaggregation> Preaggregations
{
get { return this.preaggregations; }
}
/// <summary>
/// Gets the dimensions of the metric.
/// </summary>
public IEnumerable<string> Dimensions { get; }
/// <summary>
/// Gets the computed sampling types.
/// </summary>
public IEnumerable<IComputedSamplingTypeExpression> ComputedSamplingTypes
{
get { return this.computedSamplingTypes; }
}
/// <summary>
/// Gets or sets a value indicating whether only the last value seen for a time series is preserved on the client.
/// </summary>
/// <value>
/// <c>true</c> if last sampling mode is used; otherwise, <c>false</c>.
/// </value>
/// <remarks>
/// Client side last sampling mode means that within the collection interval (1m) only the last value set to the metric is kept. This means Sum == Min == Max
/// and Count == 1 for this metric when it is sent to the server.
/// </remarks>
[JsonProperty]
public bool EnableClientSideLastSamplingMode { get; set; }
/// <summary>
/// Gets or sets a value indicating whether the metric will be published to the aggregated ETW channel.
/// </summary>
public bool EnableClientEtwPublication { get; set; }
/// <summary>
/// Determines whether this instance can add preaggregation to the metric configuration.
/// </summary>
/// <param name="preaggregationToAdd">The preaggregation to add.</param>
/// <returns>
/// True if the preaggregation can be added.
/// </returns>
public bool CanAddPreaggregation(IPreaggregation preaggregationToAdd)
{
if (preaggregationToAdd == null)
{
throw new ArgumentNullException(nameof(preaggregationToAdd));
}
foreach (var preaggregation in this.preaggregations)
{
if (string.Equals(preaggregationToAdd.Name, preaggregation.Name, StringComparison.OrdinalIgnoreCase))
{
return false;
}
if (preaggregation.Dimensions.SequenceEqual(preaggregationToAdd.Dimensions, StringComparer.OrdinalIgnoreCase))
{
return false;
}
}
return true;
}
/// <summary>
/// Adds the preaggregate.
/// </summary>
/// <param name="preaggregate">The preaggregate.</param>
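/// <example>
/// A minimal sketch (hypothetical; <c>config</c> is a <see cref="RawMetricConfiguration"/> and
/// <c>preaggregate</c> is an <see cref="IPreaggregation"/> obtained elsewhere):
/// <code>
/// if (config.CanAddPreaggregation(preaggregate))
/// {
///     config.AddPreaggregation(preaggregate);
/// }
/// </code>
/// </example>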
public void AddPreaggregation(IPreaggregation preaggregate)
{
if (!this.CanAddPreaggregation(preaggregate))
{
throw new ConfigurationValidationException("Duplicate preaggregates cannot be added.", ValidationType.DuplicatePreaggregate);
}
this.preaggregations.Add(preaggregate);
}
/// <summary>
/// Removes the preaggregate.
/// </summary>
/// <param name="preaggregateName">The name of the preaggregate to remove.</param>
public void RemovePreaggregation(string preaggregateName)
{
if (string.IsNullOrWhiteSpace(preaggregateName))
{
throw new ArgumentNullException(nameof(preaggregateName));
}
this.preaggregations.RemoveAll(x => string.Equals(x.Name, preaggregateName, StringComparison.OrdinalIgnoreCase));
}
/// <summary>
/// Adds the type of the computed sampling.
/// </summary>
/// <param name="computedSamplingType">Type of the computed sampling.</param>
public void AddComputedSamplingType(IComputedSamplingTypeExpression computedSamplingType)
{
if (this.computedSamplingTypes.Any(x => x.Name.Equals(computedSamplingType.Name, StringComparison.OrdinalIgnoreCase)))
{
throw new ConfigurationValidationException("Duplicate computed sampling types cannot be added.", ValidationType.DuplicateSamplingType);
}
this.computedSamplingTypes.Add(computedSamplingType);
}
/// <summary>
/// Removes the type of the computed sampling.
/// </summary>
/// <param name="computedSamplingTypeName">Name of the computed sampling type.</param>
public void RemoveComputedSamplingType(string computedSamplingTypeName)
{
if (string.IsNullOrWhiteSpace(computedSamplingTypeName))
{
throw new ArgumentNullException(nameof(computedSamplingTypeName));
}
for (var i = 0; i < this.computedSamplingTypes.Count; ++i)
{
if (string.Equals(computedSamplingTypeName, this.computedSamplingTypes[i].Name, StringComparison.OrdinalIgnoreCase))
{
if (this.computedSamplingTypes[i].IsBuiltIn)
{
throw new ConfigurationValidationException("Built in computed sampling types cannot be removed.", ValidationType.BuiltInTypeRemoved);
}
this.computedSamplingTypes.RemoveAt(i);
return;
}
}
}
}
}
<file_sep>// -----------------------------------------------------------------------
// <copyright file="MetricDeserializer.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// -----------------------------------------------------------------------
namespace Microsoft.Online.Metrics.Serialization
{
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Text;
/// <summary>
/// Class which deserializes Autopilot count (metric) data.
/// Serialization format corresponds to one described in http://sharepoint/sites/autopilot/team/Docs/Silicon/Monitroing%20Team/AD%20Metrics%20%20Autopilot%20Counter%20Data%20Common%20Serialization%20Format.docx.
/// </summary>
/// <typeparam name="TMetadata">Type of metadata object used for deserialization.</typeparam>
public sealed class MetricDeserializer<TMetadata>
where TMetadata : IMetricMetadata
{
private const ushort MaxVersion = 5;
private const uint TypeSerializerFlags = 0x12020000; // Corresponds to 0001.001.0000.0001.00000000000000000 (Use string and metadata interning with variable-length integer serialization)
private readonly List<string> stringsDictionary = new List<string>();
private readonly List<TMetadata> metadataDictionary = new List<TMetadata>();
private readonly List<KeyValuePair<ulong, uint>> histogramBuffer = new List<KeyValuePair<ulong, uint>>();
/// <summary>
/// Validates the data packet by CRC check
/// </summary>
/// <param name="dataPacket">Data packet to check.</param>
/// <exception cref="CrcCheckFailedSerializationException">
/// Throws when CRC check fails.
/// </exception>
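/// <example>
/// A minimal sketch (hypothetical packet source; <c>MyMetadata</c> stands for a caller-provided
/// <see cref="IMetricMetadata"/> implementation):
/// <code>
/// byte[] packet = File.ReadAllBytes("metrics.bin");
/// try
/// {
///     MetricDeserializer&lt;MyMetadata&gt;.ValidateCrc(packet);
/// }
/// catch (CrcCheckFailedSerializationException)
/// {
///     // Discard or re-request the corrupted packet.
/// }
/// </code>
/// </example>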
public static void ValidateCrc(byte[] dataPacket)
{
var version = (ushort)(dataPacket[0] | dataPacket[1] << 8);
if (version < 5)
{
// No CRC is added for versions less than 5.
return;
}
var crc = (uint)(dataPacket[2] | dataPacket[3] << 8 | dataPacket[4] << 16 | dataPacket[5] << 24);
var computedCrc = Crc.ComputeCrc(0, dataPacket, 6);
if (crc != computedCrc)
{
throw new CrcCheckFailedSerializationException($"Crc check failed. Packet CRC: {crc}, Computed CRC: {computedCrc}");
}
}
/// <summary>
/// Clears the deserializer state.
/// </summary>
public void Clear()
{
this.stringsDictionary.Clear();
this.metadataDictionary.Clear();
this.histogramBuffer.Clear();
}
/// <summary>
/// Deserializes counter (metric) data from the stream and adds all objects to provided collection.
/// </summary>
/// <param name="stream">Stream from which data should be deserialized. Stream should be readable and provide random access.</param>
/// <param name="metricBuilder">An object responsible for creation and further consumption of deserialized data.</param>
/// <param name="maxMetricStringsLength">Maximum length of strings, which represent metric name, dimension names and values.</param>
/// <param name="maxMetricNamespaceStringsLength">Maximum length of metric namespace string.</param>
/// <param name="maxMetricDimensionValueStringsLength">Maximum length of metric dimension value string.</param>
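/// <example>
/// A minimal sketch (hypothetical; <c>MyMetadata</c> and <c>MyMetricBuilder</c> stand for caller-provided
/// implementations of <see cref="IMetricMetadata"/> and <see cref="IMetricBuilder{TMetadata}"/>; the maximum
/// lengths are arbitrary example values):
/// <code>
/// var deserializer = new MetricDeserializer&lt;MyMetadata&gt;();
/// using (var stream = File.OpenRead("metrics.bin"))
/// {
///     deserializer.Deserialize(stream, new MyMetricBuilder(), 1024, 1024, 1024);
/// }
/// </code>
/// </example>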
public void Deserialize(Stream stream, IMetricBuilder<TMetadata> metricBuilder, int maxMetricStringsLength, int maxMetricNamespaceStringsLength, int maxMetricDimensionValueStringsLength)
{
if (!stream.CanRead || !stream.CanSeek)
{
throw new ArgumentException(@"Stream should be readable and provide random access.", nameof(stream));
}
try
{
using (var reader = new NoCloseBinaryReader(stream, Encoding.UTF8))
{
var startStreamPosition = stream.Position;
// Read version and type serializers info
var version = reader.ReadUInt16();
if (version > MaxVersion)
{
throw new VersionNotSupportedMetricSerializationException(
string.Format(
CultureInfo.InvariantCulture,
"Version is not supported. Read version:{0}, Max version:{1}.",
version,
MaxVersion));
}
if (version >= 5)
{
// Read CRC. CRC check is done in upper layers.
reader.ReadUInt32();
}
if (reader.ReadUInt32() != TypeSerializerFlags)
{
throw new VersionNotSupportedMetricSerializationException("Type serializers not supported.");
}
metricBuilder.SetSerializationVersion(version);
// Read strings
var deserializerDataPosition = stream.Position;
stream.Position += sizeof(long);
stream.Position = startStreamPosition + reader.ReadInt64();
var count = SerializationUtils.ReadUInt32FromBase128(reader);
for (uint i = 0; i < count; ++i)
{
this.stringsDictionary.Add(reader.ReadString());
}
// Read metrics metadata
stream.Position = deserializerDataPosition;
stream.Position = startStreamPosition + reader.ReadInt64();
count = SerializationUtils.ReadUInt32FromBase128(reader);
for (uint i = 0; i < count; ++i)
{
this.metadataDictionary.Add(this.ReadMetricMetadata(reader, metricBuilder, version, maxMetricStringsLength, maxMetricNamespaceStringsLength));
}
// Read metrics data
stream.Position = deserializerDataPosition + (2 * sizeof(long));
this.ReadMetricsData(reader, metricBuilder, version, maxMetricStringsLength, maxMetricNamespaceStringsLength, maxMetricDimensionValueStringsLength);
}
}
catch (IOException ioException)
{
throw new MetricSerializationException("Failed to deserialize data. Problem with input stream.", ioException);
}
catch (Exception exception)
{
throw new MetricSerializationException("Failed to deserialize data. Likely the incoming stream contains corrupted data.", exception);
}
finally
{
this.metadataDictionary.Clear();
this.stringsDictionary.Clear();
this.histogramBuffer.Clear();
}
}
private List<KeyValuePair<ulong, uint>> ReadHistogram(BinaryReader reader, ushort version)
{
SerializationUtils.ReadHistogramTo(this.histogramBuffer, reader, version > 3);
return this.histogramBuffer;
}
private void ReadMetricsData(BinaryReader reader, IMetricBuilder<TMetadata> metricBuilder, ushort version, int maxMetricStringsLength, int maxMetricNamespaceStringsLength, int maxMetricDimensionValueStringsLength)
{
long packetTime = 0;
if (version >= 5)
{
packetTime = (long)SerializationUtils.ReadUInt64FromBase128(reader);
}
// Versions before 2 used a variable number of bytes to write the number of serialized metrics.
// From version 2 passing IEnumerable<IReadOnlyMetric> is supported, so the number of metrics
// is unknown beforehand and a variable-length number can no longer be used. Thus a fixed 4-byte uint is used.
var count = version >= 2 ? reader.ReadUInt32() : SerializationUtils.ReadUInt32FromBase128(reader);
for (var i = 0; i < count; ++i)
{
metricBuilder.BeginMetricCreation();
var metadata = this.ReadMetricMetadataByIndex(reader);
metricBuilder.AssignMetadata(metadata);
// In versions 0-2 the Monitoring Account and Metric Namespace were part of the Metric data.
// From version 3 the Monitoring Account is removed and the Metric Namespace became part of the Metric Metadata.
if (version < 3)
{
var monitoringAccount = this.ReadStringByIndex(reader);
if (monitoringAccount.Length > maxMetricStringsLength)
{
throw new MetricSerializationException($"Monitoring account string in the packet exceeds preconfigured length. Packet is corrupted. MaxLength:{maxMetricStringsLength}, Value:{monitoringAccount}.", null);
}
var metricNamespace = this.ReadStringByIndex(reader);
if (metricNamespace.Length > maxMetricNamespaceStringsLength)
{
throw new MetricSerializationException($"Namespace string in the packet exceeds preconfigured length. Packet is corrupted. MaxLength:{maxMetricNamespaceStringsLength}, Value:{metricNamespace}.", null);
}
metricBuilder.AssignMonitoringAccount(monitoringAccount);
metricBuilder.AssignNamespace(metricNamespace);
}
if (version == 0)
{
// Skip event id
this.ReadStringByIndex(reader);
}
if (version >= 5)
{
var timeInTicks = (packetTime - SerializationUtils.ReadInt64FromBase128(reader)) * SerializationUtils.OneMinuteInterval;
metricBuilder.AssignTimeUtc(new DateTime(timeInTicks, DateTimeKind.Utc));
}
else
{
metricBuilder.AssignTimeUtc(new DateTime((long)SerializationUtils.ReadUInt64FromBase128(reader), DateTimeKind.Utc));
}
for (var j = 0; j < metadata.DimensionsCount; ++j)
{
var dimensionValue = this.ReadStringByIndex(reader);
if (dimensionValue.Length > maxMetricDimensionValueStringsLength)
{
throw new MetricSerializationException($"Dimension value string in the packet exceeds preconfigured length. Packet is corrupted. MaxLength:{maxMetricDimensionValueStringsLength}, Value:{dimensionValue}.", null);
}
metricBuilder.AddDimensionValue(dimensionValue);
}
var samplingTypes = (SamplingTypes)SerializationUtils.ReadUInt32FromBase128(reader);
metricBuilder.AssignSamplingTypes(samplingTypes);
if ((samplingTypes & SamplingTypes.Min) != 0)
{
metricBuilder.AssignMin(SerializationUtils.ReadUInt64FromBase128(reader));
}
if ((samplingTypes & SamplingTypes.Max) != 0)
{
metricBuilder.AssignMax(SerializationUtils.ReadUInt64FromBase128(reader));
}
if ((samplingTypes & SamplingTypes.Sum) != 0)
{
metricBuilder.AssignSum(SerializationUtils.ReadUInt64FromBase128(reader));
}
if ((samplingTypes & SamplingTypes.Count) != 0)
{
metricBuilder.AssignCount(SerializationUtils.ReadUInt32FromBase128(reader));
}
if ((samplingTypes & SamplingTypes.SumOfSquareDiffFromMean) != 0)
{
var sumOfSquareDiffFromMean = reader.ReadDouble();
metricBuilder.AssignSumOfSquareDiffFromMean(sumOfSquareDiffFromMean);
}
if ((samplingTypes & SamplingTypes.Histogram) != 0)
{
metricBuilder.AssignHistogram(this.ReadHistogram(reader, version));
}
if ((samplingTypes & SamplingTypes.HyperLogLogSketch) != 0)
{
var sizeOfHyperLogLogSketches = reader.ReadInt32();
metricBuilder.AssignHyperLogLogSketch(reader, sizeOfHyperLogLogSketches);
}
metricBuilder.EndMetricCreation();
}
}
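// For orientation, the per-metric record consumed by ReadMetricsData above is, in order:
// metadata index; monitoring account and namespace string indexes (only for version < 3);
// event id string index (only for version 0); the time (delta-encoded against the packet time
// for version >= 5); one string index per dimension value; the SamplingTypes bit mask; and then
// only those aggregate fields whose bits are set (Min, Max, Sum, Count, SumOfSquareDiffFromMean,
// Histogram, HyperLogLog sketches).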
private string ReadStringByIndex(BinaryReader reader)
{
var index = (int)SerializationUtils.ReadUInt32FromBase128(reader);
return this.stringsDictionary[index];
}
private TMetadata ReadMetricMetadataByIndex(BinaryReader reader)
{
var index = (int)SerializationUtils.ReadUInt32FromBase128(reader);
return this.metadataDictionary[index];
}
private TMetadata ReadMetricMetadata(BinaryReader reader, IMetricBuilder<TMetadata> metricBuilder, ushort version, int maxMetricStringsLength, int maxMetricNamespaceStringsLength)
{
var metricNamespace = string.Empty;
// In versions 0-2 the Metric Namespace was part of the metric data; from version 3 it became a part of the Metric Metadata.
if (version >= 3)
{
metricNamespace = this.ReadStringByIndex(reader);
if (metricNamespace.Length > maxMetricNamespaceStringsLength)
{
throw new MetricSerializationException($"Namespace string in the packet exceeds preconfigured length. Packet is corrupted. MaxLength:{maxMetricNamespaceStringsLength}, Value:{metricNamespace}.", null);
}
}
var metricName = this.ReadStringByIndex(reader);
if (metricName.Length > maxMetricStringsLength)
{
throw new MetricSerializationException($"Metric name string in the packet exceeds preconfigured length. Packet is corrupted. MaxLength:{maxMetricStringsLength}, Value:{metricName}.", null);
}
var count = SerializationUtils.ReadUInt32FromBase128(reader);
var dimensionNames = new List<string>((int)count);
for (var i = 0; i < count; ++i)
{
var dimensionName = this.ReadStringByIndex(reader);
if (dimensionName.Length > maxMetricStringsLength)
{
throw new MetricSerializationException($"Dimension name string in the packet exceeds preconfigured length. Packet is corrupted. MaxLength:{maxMetricStringsLength}, Value:{dimensionName}.", null);
}
dimensionNames.Add(dimensionName);
}
return metricBuilder.CreateMetadata(metricNamespace, metricName, dimensionNames);
}
/// <summary>
/// A binary reader that flushes, but does not close, the underlying stream when disposed.
/// </summary>
public class NoCloseBinaryReader : BinaryReader
{
/// <summary>
/// Initializes a new instance of the <see cref="NoCloseBinaryReader"/> class.
/// </summary>
/// <param name="stream">The stream.</param>
/// <param name="encoding">The encoding.</param>
public NoCloseBinaryReader(Stream stream, Encoding encoding)
: base(stream, encoding)
{
}
/// <inheritdoc />
protected override void Dispose(bool disposing)
{
this.BaseStream.Flush();
base.Dispose(false);
}
}
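// Example usage sketch (hypothetical caller): because disposing the reader only flushes the
// underlying stream, the stream remains usable afterwards.
//
// using (var reader = new NoCloseBinaryReader(stream, Encoding.UTF8))
// {
//     var marker = reader.ReadUInt16();
// }
// // 'stream' is still open here and can continue to be read or written.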
}
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="IReadOnlyMetric.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//
// <author email="selavrin">
// <NAME>
// </author>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Online.Metrics.Serialization
{
using System;
using System.IO;
using global::Metrics.Services.Common.BlobSegment;
/// <summary>
/// Read only interface for metric data.
/// </summary>
/// TODO: Once all publishers and receivers are moved to version 3 of serialization, remove MonitoringAccount and MetricNamespace from here
public interface IReadOnlyMetric
{
/// <summary>
/// Gets the Monitoring Account to which this metric is reported.
/// </summary>
string MonitoringAccount { get; }
/// <summary>
/// Gets the metric namespace.
/// </summary>
string MetricNamespace { get; }
/// <summary>
/// Gets the time when metric was reported.
/// </summary>
DateTime TimeUtc { get; }
/// <summary>
/// Gets the metric metadata.
/// </summary>
IMetricMetadata MetricMetadata { get; }
/// <summary>
/// Gets the sampling types this metric contains.
/// </summary>
SamplingTypes SamplingTypes { get; }
/// <summary>
/// Gets the number of samples for which this metric is reported.
/// </summary>
uint Count { get; }
/// <summary>
/// Gets the minimum value of the samples reported for this metric.
/// </summary>
ulong Min { get; }
/// <summary>
/// Gets the maximum value of the samples reported for this metric.
/// </summary>
ulong Max { get; }
/// <summary>
/// Gets the sum of the sample values reported for this metric.
/// </summary>
ulong Sum { get; }
/// <summary>
/// Gets the sum of squared differences from the mean for the sample values reported for this metric.
/// </summary>
double SumOfSquareDiffFromMean { get; }
/// <summary>
/// Gets the minimum value of the samples reported for this metric.
/// </summary>
MetricValueV2 MinUnion { get; }
/// <summary>
/// Gets the maximum value of the samples reported for this metric.
/// </summary>
MetricValueV2 MaxUnion { get; }
/// <summary>
/// Gets the sum of the sample values reported for this metric.
/// </summary>
MetricValueV2 SumUnion { get; }
/// <summary>
/// Gets the histogram created from sample values reported for this metric.
/// </summary>
IReadOnlyHistogram Histogram { get; }
/// <summary>
/// Gets the tdigest created from sample values reported for this metric.
/// </summary>
IReadOnlyTDigest TDigest { get; }
/// <summary>
/// Gets the hyperloglog sketches from this metric.
/// </summary>
IReadOnlyHyperLogLogSketches HyperLogLogSketches { get; }
/// <summary>
/// Gets the hyperloglog sketches stream.
/// </summary>
/// <remarks>
/// This will be null if HyperLogLogSketches is set and vice-versa.
/// </remarks>
Stream HyperLogLogSketchesStream { get; }
/// <summary>
/// Gets the value of the dimension by dimension index.
/// Implementers of this interface must ensure that the number of dimensions specified in MetricMetadata equals the number of dimension values.
/// </summary>
/// <param name="dimensionIndex">Index of the dimension for which to get value in 0..MetricMetadata.DimensionsCount range.</param>
/// <returns>Value of the dimension.</returns>
string GetDimensionValue(int dimensionIndex);
}
}
<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="MetricValueV2.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Online.Metrics.Serialization
{
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
/// <summary>
/// Represents metric value as union of Long, Ulong, Double.
/// </summary>
[StructLayout(LayoutKind.Explicit)]
public struct MetricValueV2
{
/// <summary>
/// The value as double datatype.
/// </summary>
[FieldOffset(0)]
public double ValueAsDouble;
/// <summary>
/// The value as long datatype.
/// </summary>
[FieldOffset(0)]
public long ValueAsLong;
/// <summary>
/// The value as unsigned long datatype.
/// </summary>
[FieldOffset(0)]
public ulong ValueAsULong;
public static explicit operator MetricValueV2(ulong v)
{
return new MetricValueV2 { ValueAsULong = v };
}
public static explicit operator MetricValueV2(long v)
{
return new MetricValueV2 { ValueAsLong = v };
}
public static explicit operator MetricValueV2(int v)
{
return new MetricValueV2 { ValueAsLong = v };
}
public static explicit operator MetricValueV2(uint v)
{
return new MetricValueV2 { ValueAsLong = v };
}
public static explicit operator MetricValueV2(double v)
{
return new MetricValueV2 { ValueAsDouble = v };
}
/// <summary>
/// Determines whether the double value of this instance can be represented as a long (i.e., it has no fractional part).
/// </summary>
/// <returns>True if the value can be represented as long.</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public bool CanRepresentDoubleAsLong()
{
// ReSharper disable once CompareOfFloatsByEqualityOperator
return this.ValueAsDouble - (long)this.ValueAsDouble == 0;
}
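// Example usage sketch (hypothetical caller) showing the explicit conversions and the union layout:
//
// var value = (MetricValueV2)12.0;
// bool fitsInLong = value.CanRepresentDoubleAsLong(); // true, since 12.0 has no fractional part
// var sameBytesAsULong = (MetricValueV2)12UL;         // occupies the same 8 bytes, interpreted as ulong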
}
}<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="ConnectionInfo.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client
{
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Net.Sockets;
using System.Security.Cryptography.X509Certificates;
using System.Threading;
using System.Threading.Tasks;
using Logging;
using Newtonsoft.Json;
using Utility;
/// <summary>
/// Enumeration to distinguish the MDM environments that will be used for the connections.
/// </summary>
public enum MdmEnvironment
{
/// <summary>
/// Uses the MDM/Jarvis production environments.
/// </summary>
Production,
/// <summary>
/// Uses the MDM/Jarvis INT environments.
/// </summary>
Int
}
/// <summary>
/// The connection information used to connect to metrics backend.
/// </summary>
public sealed class ConnectionInfo
{
/// <summary>
/// Possible part of operation URI.
/// </summary>
public const string CertApiFirstSegment = "/api/";
/// <summary>
/// Possible part of operation URI.
/// </summary>
public const string UserApiFirstSegment = "/user-api/";
/// <summary>
/// The default timeout.
/// </summary>
internal static readonly TimeSpan DefaultTimeout = TimeSpan.FromSeconds(100);
/// <summary>
/// There are just two types of global MDM environments: INT and PROD. This value must match the number of values defined for <see cref="MdmEnvironment"/>.
/// </summary>
private const int NumberOfMdmGlobalEnvironments = 2;
/// <summary>
/// The minimum timeout.
/// </summary>
private static readonly TimeSpan MinTimeout = TimeSpan.FromSeconds(30);
/// <summary>
/// The maximum timeout.
/// </summary>
private static readonly TimeSpan MaxTimeout = TimeSpan.FromSeconds(300);
/// <summary>
/// The log identifier for the class.
/// </summary>
private static readonly object LogId = Logger.CreateCustomLogId("ConnectionInfo");
/// <summary>
/// Maps the accounts to the account stamp information.
/// </summary>
private static readonly ConcurrentDictionary<string, StampInformation>[] AccountToUriMaps = new ConcurrentDictionary<string, StampInformation>[NumberOfMdmGlobalEnvironments];
/// <summary>
/// Maps the GSLB to the respective Uri object according to the environment type being targeted. This avoids duplication of URIs objects
/// if the same instance is used to connect to many different endpoints, for most usages it is expected to carry a single value.
/// </summary>
private static readonly ConcurrentDictionary<string, Uri>[] GslbToUris = new ConcurrentDictionary<string, Uri>[NumberOfMdmGlobalEnvironments];
/// <summary>
/// The host to IP URI map.
/// </summary>
private static readonly ConcurrentDictionary<Uri, Uri> HostToIpUriMap = new ConcurrentDictionary<Uri, Uri>();
/// <summary>
/// Lock to control initialization of statics related to a global environment.
/// </summary>
private static readonly object GlobalEnvironmentInitializationLock = new object();
/// <summary>
/// The timer to refresh accounts' home stamps.
/// </summary>
private static readonly Timer TimerToRefreshHomeStamp;
/// <summary>
/// The timer to refresh IP address.
/// </summary>
private static readonly Timer TimerToRefreshIpAddress;
/// <summary>
/// An HTTP client without authentication.
/// </summary>
private static readonly HttpClient HttpClientWithoutAuthentication = HttpClientHelper.CreateHttpClient(DefaultTimeout);
/// <summary>
/// The paths to the known global environments; their order must match the enum <see cref="MdmEnvironment"/> used to identify the desired
/// global environment.
/// </summary>
private static volatile string[] globalEnvironments;
/// <summary>
/// The MDM environment being used; it is only meaningful if no <see cref="Endpoint"/> was specified in the construction of the instance.
/// </summary>
private readonly int mdmEnvironmentMapIndex;
/// <summary>
/// The certificate used to authenticate with MDM.
/// </summary>
private X509Certificate2 certificate;
/// <summary>
/// The thumbprint of the certificate used to authenticate with MDM.
/// </summary>
private string certificateThumbprint;
/// <summary>
/// Flag to tell if a global endpoint is specified.
/// </summary>
private bool? isGlobalEndpoint;
/// <summary>
/// Initializes static members of the <see cref="ConnectionInfo"/> class.
/// </summary>
static ConnectionInfo()
{
// Prefer TLS 1.2
ServicePointManager.SecurityProtocol &= ~SecurityProtocolType.Ssl3;
ServicePointManager.SecurityProtocol |= SecurityProtocolType.Tls | SecurityProtocolType.Tls11 | SecurityProtocolType.Tls12;
for (int index = 0; index < NumberOfMdmGlobalEnvironments; index++)
{
GslbToUris[index] = new ConcurrentDictionary<string, Uri>(StringComparer.OrdinalIgnoreCase);
AccountToUriMaps[index] = new ConcurrentDictionary<string, StampInformation>(StringComparer.OrdinalIgnoreCase);
}
TimerToRefreshIpAddress = new Timer(state => RefreshIpAddresses(), null, DnsResolutionUpdateFrequency, System.Threading.Timeout.InfiniteTimeSpan);
TimerToRefreshHomeStamp = new Timer(state => RefreshAccountHomeStamp(), null, HomeStampAutomaticUpdateFrequency, System.Threading.Timeout.InfiniteTimeSpan);
}
/// <summary>
/// Initializes a new instance of the <see cref="ConnectionInfo"/> class for the special case of explicitly fixing the endpoint to be used.
/// </summary>
/// <param name="endpoint">The endpoint.</param>
/// <param name="certificateThumbprint">The certificate thumbprint.</param>
public ConnectionInfo(Uri endpoint, string certificateThumbprint)
: this(endpoint, certificateThumbprint, StoreLocation.LocalMachine)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="ConnectionInfo"/> class.
/// </summary>
/// <param name="certificateThumbprint">The certificate thumbprint.</param>
/// <param name="mdmEnvironment">The to be targeted by the instance.</param>
public ConnectionInfo(string certificateThumbprint, MdmEnvironment mdmEnvironment = MdmEnvironment.Production)
: this(null, certificateThumbprint, StoreLocation.LocalMachine, null, DefaultTimeout, mdmEnvironment)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="ConnectionInfo"/> class for the special case of explicitly fixing the endpoint to be used.
/// </summary>
/// <param name="endpoint">The endpoint.</param>
/// <param name="certificate">The certificate used to authenticate with MDM.</param>
public ConnectionInfo(Uri endpoint, X509Certificate2 certificate)
: this(endpoint, null, StoreLocation.LocalMachine, certificate, DefaultTimeout)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="ConnectionInfo" /> class.
/// </summary>
/// <param name="endpoint">The endpoint of metrics backend servers.</param>
/// <param name="certificate">The certificate used to authenticate with MDM.</param>
/// <param name="timeout">The time to wait before the request times out. The timeout value can range from 30 seconds to 5 minutes.</param>
public ConnectionInfo(Uri endpoint, X509Certificate2 certificate, TimeSpan timeout)
: this(endpoint, null, StoreLocation.LocalMachine, certificate, timeout)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="ConnectionInfo"/> class for the special case of explicitly fixing the endpoint to be used.
/// </summary>
/// <param name="endpoint">The endpoint.</param>
public ConnectionInfo(Uri endpoint)
: this(endpoint, null, StoreLocation.LocalMachine, null, DefaultTimeout)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="ConnectionInfo"/> class for the special case of explicitly fixing the endpoint to be used.
/// </summary>
/// <param name="timeout">The time to wait before the request times out. The timeout value can range from 30 seconds to 5 minutes.</param>
/// <param name="endpoint">The endpoint.</param>
public ConnectionInfo(TimeSpan timeout, Uri endpoint)
: this(endpoint, null, StoreLocation.LocalMachine, null, timeout)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="ConnectionInfo"/> class.
/// </summary>
/// <param name="certificate">The certificate used to authenticate with MDM.</param>
/// <param name="mdmEnvironment">The global environment to be targeted by the instance.</param>
public ConnectionInfo(X509Certificate2 certificate, MdmEnvironment mdmEnvironment = MdmEnvironment.Production)
: this(null, null, StoreLocation.LocalMachine, certificate, DefaultTimeout, mdmEnvironment)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="ConnectionInfo"/> class.
/// </summary>
/// <param name="mdmEnvironment">The global environment to be targeted by the instance.</param>
public ConnectionInfo(MdmEnvironment mdmEnvironment = MdmEnvironment.Production)
: this(null, null, StoreLocation.LocalMachine, null, DefaultTimeout, mdmEnvironment)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="ConnectionInfo"/> class.
/// </summary>
/// <param name="timeout">The time to wait before the request times out. The timeout value can range from 30 seconds to 5 minutes.</param>
/// <param name="mdmEnvironment">The global environment to be targeted by the instance.</param>
public ConnectionInfo(TimeSpan timeout, MdmEnvironment mdmEnvironment = MdmEnvironment.Production)
: this(null, null, StoreLocation.LocalMachine, null, timeout, mdmEnvironment)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="ConnectionInfo"/> class for the special case of explicitly fixing the endpoint to be used.
/// </summary>
/// <param name="endpoint">The endpoint of metrics backend servers.</param>
/// <param name="certificateThumbprint">The thumbprint of the certificate used to publish metrics data to the MDM.</param>
/// <param name="certificateStoreLocation">The certificate store location.</param>
public ConnectionInfo(Uri endpoint, string certificateThumbprint, StoreLocation certificateStoreLocation)
: this(endpoint, certificateThumbprint, certificateStoreLocation, DefaultTimeout)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="ConnectionInfo"/> class.
/// </summary>
/// <param name="certificateThumbprint">The thumbprint of the certificate used to publish metrics data to the MDM.</param>
/// <param name="certificateStoreLocation">The certificate store location.</param>
/// <param name="mdmEnvironment">The global environment to be targeted by the instance.</param>
public ConnectionInfo(string certificateThumbprint, StoreLocation certificateStoreLocation, MdmEnvironment mdmEnvironment = MdmEnvironment.Production)
: this(null, certificateThumbprint, certificateStoreLocation, null, DefaultTimeout, mdmEnvironment)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="ConnectionInfo" /> class for the special case of explicitly fixing the endpoint to be used.
/// </summary>
/// <param name="endpoint">The endpoint of metrics backend servers.</param>
/// <param name="certificateThumbprint">The thumbprint of the certificate used to publish metrics data to the MDM.</param>
/// <param name="certificateStoreLocation">The certificate store location.</param>
/// <param name="timeout">The time to wait before the request times out. The timeout value can range from 30 seconds to 5 minutes.</param>
public ConnectionInfo(Uri endpoint, string certificateThumbprint, StoreLocation certificateStoreLocation, TimeSpan timeout)
: this(endpoint, certificateThumbprint, certificateStoreLocation, null, timeout)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="ConnectionInfo" /> class.
/// </summary>
/// <param name="certificateThumbprint">The thumbprint of the certificate used to publish metrics data to the MDM.</param>
/// <param name="certificateStoreLocation">The certificate store location.</param>
/// <param name="timeout">The time to wait before the request times out. The timeout value can range from 30 seconds to 5 minutes.</param>
/// <param name="mdmEnvironment">The global environment to be targeted by the instance.</param>
public ConnectionInfo(string certificateThumbprint, StoreLocation certificateStoreLocation, TimeSpan timeout, MdmEnvironment mdmEnvironment = MdmEnvironment.Production)
: this(null, certificateThumbprint, certificateStoreLocation, null, timeout, mdmEnvironment)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="ConnectionInfo" /> class for the special case of explicitly fixing the endpoint to be used.
/// </summary>
/// <param name="endpoint">The endpoint of metrics backend servers.</param>
/// <param name="certificateThumbprint">The thumbprint of the certificate used to publish metrics data to the MDM.</param>
/// <param name="certificateStoreLocation">The certificate store location.</param>
/// <param name="certificate">The certificate used to authenticate with MDM.</param>
/// <param name="timeout">The time to wait before the request times out. The timeout value can range from 30 seconds to 5 minutes.</param>
/// <exception cref="ArgumentNullException">
/// endpoint
/// or
/// certificateThumbprint
/// </exception>
private ConnectionInfo(Uri endpoint, string certificateThumbprint, StoreLocation certificateStoreLocation, X509Certificate2 certificate, TimeSpan timeout)
: this(endpoint, certificateThumbprint, certificateStoreLocation, certificate, timeout, MdmEnvironment.Production)
{
if (endpoint == null)
{
throw new ArgumentNullException(nameof(endpoint));
}
}
/// <summary>
/// Initializes a new instance of the <see cref="ConnectionInfo" /> class.
/// </summary>
/// <param name="endpoint">The endpoint of metrics backend servers.</param>
/// <param name="certificateThumbprint">The thumbprint of the certificate used to publish metrics data to the MDM.</param>
/// <param name="certificateStoreLocation">The certificate store location.</param>
/// <param name="certificate">The certificate used to authenticate with MDM.</param>
/// <param name="timeout">The time to wait before the request times out. The timeout value can range from 30 seconds to 5 minutes.</param>
/// <param name="mdmEnvironment">The global environment to be targeted by the instance.</param>
/// <exception cref="ArgumentNullException">
/// endpoint
/// or
/// certificateThumbprint
/// </exception>
private ConnectionInfo(
Uri endpoint,
string certificateThumbprint,
StoreLocation certificateStoreLocation,
X509Certificate2 certificate,
TimeSpan timeout,
MdmEnvironment mdmEnvironment)
{
if (certificate != null && !string.IsNullOrWhiteSpace(certificateThumbprint))
{
throw new ArgumentException($"Either {nameof(certificate)} or {nameof(certificateThumbprint)} can be specified, but not both.");
}
if (timeout < MinTimeout || timeout > MaxTimeout)
{
throw new ArgumentException($"The timeout value for a request must be between {MinTimeout} and {MaxTimeout}.");
}
if (certificate == null && string.IsNullOrWhiteSpace(certificateThumbprint))
{
this.UseAadUserAuthentication = true;
}
Logger.Log(
LoggerLevel.Info,
LogId,
"Created",
"A new connection was created. Endpoint:{0}, CertThumbprint:{1}, CertStore:{2}, TimeoutMs:{3}",
endpoint,
certificate != null ? certificate.Thumbprint : certificateThumbprint,
certificateStoreLocation,
timeout.TotalMilliseconds);
this.Endpoint = endpoint;
this.CertificateThumbprint = certificateThumbprint;
this.CertificateStoreLocation = certificateStoreLocation;
this.Certificate = certificate;
this.Timeout = timeout;
if (mdmEnvironment != MdmEnvironment.Production && mdmEnvironment != MdmEnvironment.Int)
{
throw new ArgumentException($"The parameter {nameof(mdmEnvironment)} has an invalid value {mdmEnvironment}");
}
this.mdmEnvironmentMapIndex = (int)mdmEnvironment;
}
/// <summary>
/// Gets or sets the frequency at which the automatic refresh of home stamps is performed (if any refresh is being performed).
/// </summary>
public static TimeSpan HomeStampAutomaticUpdateFrequency { get; set; } = TimeSpan.FromMinutes(10);
/// <summary>
/// Gets or sets the frequency at which the automatic refresh of the home stamp host to IP map is performed.
/// </summary>
public static TimeSpan DnsResolutionUpdateFrequency { get; set; } = TimeSpan.FromSeconds(4);
/// <summary>
/// Gets a value indicating whether to use AAD user authentication.
/// </summary>
public bool UseAadUserAuthentication { get; }
/// <summary>
/// Gets a value indicating whether this instance is global endpoint.
/// </summary>
public bool IsGlobalEndpoint
{
get
{
if (this.Endpoint == null)
{
return false;
}
if (this.isGlobalEndpoint.HasValue)
{
return this.isGlobalEndpoint.Value;
}
if (globalEnvironments == null)
{
throw new MetricsClientException("The global environments haven't been resolved yet.");
}
foreach (var ge in globalEnvironments)
{
if (this.Endpoint.Host.Equals(new Uri(ge).Host, StringComparison.OrdinalIgnoreCase))
{
this.isGlobalEndpoint = true;
break;
}
}
if (this.isGlobalEndpoint != true)
{
this.isGlobalEndpoint = false;
}
return this.isGlobalEndpoint.Value;
}
}
/// <summary>
/// Gets or sets the additional default request headers used for internal testing.
/// </summary>
public Dictionary<string, string> AdditionalDefaultRequestHeaders { get; set; }
/// <summary>
/// Gets the endpoint pointed to by this instance; typically null, and non-null only when the instance was constructed with an explicit endpoint.
/// </summary>
public Uri Endpoint { get; }
/// <summary>
/// Gets the certificate used to authenticate with MDM.
/// </summary>
public X509Certificate2 Certificate
{
get
{
if (this.certificate == null)
{
this.certificate = CertificateHelper.FindAndValidateCertificate(this.CertificateThumbprint, this.CertificateStoreLocation);
}
return this.certificate;
}
private set
{
this.certificate = value;
}
}
/// <summary>
/// Gets the certificate thumbprint.
/// </summary>
public string CertificateThumbprint
{
get
{
if (this.certificate != null)
{
this.certificateThumbprint = this.certificate.Thumbprint;
}
return this.certificateThumbprint;
}
private set
{
this.certificateThumbprint = value;
}
}
/// <summary>
/// Gets the MDM environment being used by the connection object.
/// </summary>
public MdmEnvironment MdmEnvironment
{
get
{
if (this.Endpoint != null)
{
throw new InvalidOperationException($"Endpoint was specified during construction of instance, this instance operates only against that given endpoint {this.Endpoint}.");
}
return (MdmEnvironment)this.mdmEnvironmentMapIndex;
}
}
/// <summary>
/// Gets the certificate store location.
/// </summary>
public StoreLocation CertificateStoreLocation { get; private set; }
/// <summary>
/// Gets the time to wait before the request times out.
/// </summary>
public TimeSpan Timeout { get; private set; }
/// <summary>
/// Gets or sets a value indicating whether to disable DNS resolution for unit test.
/// </summary>
internal static bool DisableDnsResolutionForUnitTest { get; set; }
/// <summary>
/// Gets the endpoint for a given MDM monitoring account.
/// </summary>
/// <param name="monitoringAccount">Monitoring account for which we want to retrieve the endpoint.</param>
/// <returns>Returns the URI of the given account, or the fixed endpoint if one was specified at construction time.</returns>
public Uri GetEndpoint(string monitoringAccount)
{
if (this.Endpoint != null)
{
// This instance is fixed on an endpoint; always return that endpoint.
return this.Endpoint;
}
StampInformation endpoint = this.GetAndUpdateIfRequiredStampInformation(monitoringAccount);
return endpoint.StampMainUri;
}
/// <summary>
/// Gets the endpoint for querying metrics data information of MDM monitoring account.
/// </summary>
/// <param name="monitoringAccount">Monitoring account for which we want to retrieve the endpoint.</param>
/// <returns>Returns the URI of the given account, or the fixed endpoint if one was specified at construction time.</returns>
public Uri GetMetricsDataQueryEndpoint(string monitoringAccount)
{
if (this.Endpoint != null)
{
// This instance is fixed on an endpoint; always return that endpoint.
return this.Endpoint;
}
if (this.UseAadUserAuthentication)
{
return this.GetEndpoint(monitoringAccount);
}
StampInformation endpoint = this.GetAndUpdateIfRequiredStampInformation(monitoringAccount);
return endpoint.StampQueryUri;
}
/// <summary>
/// Gets the global endpoint.
/// </summary>
/// <returns>Returns the URI of the global endpoint.</returns>
public Uri GetGlobalEndpoint()
{
return new Uri(ResolveGlobalEnvironments()[this.mdmEnvironmentMapIndex]);
}
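// Example usage sketch (hypothetical thumbprint and account name):
//
// var connectionInfo = new ConnectionInfo("0123456789ABCDEF0123456789ABCDEF01234567", MdmEnvironment.Production);
// Uri accountEndpoint = connectionInfo.GetEndpoint("MyMonitoringAccount");
// Uri queryEndpoint = connectionInfo.GetMetricsDataQueryEndpoint("MyMonitoringAccount");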
/// <summary>
/// Resolves the base URIs to the known global environments; their order must match the enum <see cref="MdmEnvironment" /> used to identify the desired
/// global environment.
/// </summary>
/// <returns>The base URIs to the known global environments.</returns>
internal static string[] ResolveGlobalEnvironments()
{
if (globalEnvironments != null)
{
return globalEnvironments;
}
lock (GlobalEnvironmentInitializationLock)
{
globalEnvironments = new[]
{
$"https://{ProductionGlobalEnvironmentResolver.ResolveGlobalStampHostName()}",
"https://az-int.metrics.nsatc.net"
};
for (int i = 0; i < globalEnvironments.Length; i++)
{
// Some accounts may have an empty home gslb set; in that case use the default stamp for the environment.
var defaultGlobalUri = new Uri(globalEnvironments[i]);
GslbToUris[i].TryAdd(string.Empty, defaultGlobalUri);
}
return globalEnvironments;
}
}
/// <summary>
/// Resolves the ip for <paramref name="hostname"/>.
/// </summary>
/// <param name="hostname">The hostname.</param>
/// <param name="throwOnFailure">if set to <c>true</c> [throw on failure].</param>
/// <returns>The IPv4 address.</returns>
internal static async Task<string> ResolveIp(string hostname, bool throwOnFailure)
{
try
{
IPAddress[] addresslist = await Dns.GetHostAddressesAsync(hostname).ConfigureAwait(false);
// use only IPv4 for now.
var resolvedIp = addresslist?.FirstOrDefault(a => a.AddressFamily == AddressFamily.InterNetwork)?.ToString();
if (string.IsNullOrWhiteSpace(resolvedIp))
{
throw new Exception($"Resolved IP is null or empty. Addresslist:{JsonConvert.SerializeObject(addresslist)}.");
}
return resolvedIp;
}
catch (Exception e)
{
Logger.Log(
LoggerLevel.Error,
LogId,
"ResolveIp",
$"Resolving hostname to IP got an exception. HostName:{hostname}, Exception:{e}");
if (throwOnFailure)
{
throw new MetricsClientException($"Resolving {hostname} to IP got an exception.", e);
}
return null;
}
}
/// <summary>
/// Attempts to get the IP address from the cache. If it is not cached, the IP is resolved and cached before returning.
/// </summary>
/// <param name="endpoint">The endpoint.</param>
/// <returns>The IP-based URI for the endpoint.</returns>
internal static async Task<Uri> GetCachedIpAddress(Uri endpoint)
{
if (DisableDnsResolutionForUnitTest)
{
return endpoint;
}
if (endpoint.Host == "metrics-ob.dc.ad.msft.net")
{
// TODO: it must be some devbox setup issue that caused 127.0.0.1:44300 not to work.
return endpoint;
}
Uri ipUri;
if (HostToIpUriMap.TryGetValue(endpoint, out ipUri))
{
return ipUri;
}
var resolvedIp = await ResolveIp(endpoint.Host, throwOnFailure: true).ConfigureAwait(false);
ipUri = new Uri($"https://{resolvedIp}:{endpoint.Port}");
HostToIpUriMap.TryAdd(endpoint, ipUri);
return ipUri;
}
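// Illustrative example (hypothetical host and address): a cached entry maps
// https://mystamp.metrics.example:443 to https://10.1.2.3:443; RefreshIpAddresses re-resolves
// the host on the DnsResolutionUpdateFrequency schedule and updates the cached entry when the IP changes.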
/// <summary>
/// Gets the relative URL depending on authentication.
/// </summary>
/// <param name="relativeUrl">The relative URL without authentication consideration.</param>
/// <returns>The relative URL considering authentication type.</returns>
internal string GetAuthRelativeUrl(string relativeUrl)
{
return this.UseAadUserAuthentication ? UserApiFirstSegment + relativeUrl : CertApiFirstSegment + relativeUrl;
}
/// <summary>
/// Check this instance's field <see cref="isGlobalEndpoint" /> by handling exception with retry.
/// </summary>
/// <returns>The value of <see cref="isGlobalEndpoint" /> with retry.</returns>
internal bool CheckIfGlobalEndpointWithRetry()
{
try
{
return this.IsGlobalEndpoint;
}
catch (MetricsClientException)
{
}
ResolveGlobalEnvironments();
return this.IsGlobalEndpoint;
}
private static async void RefreshIpAddresses()
{
try
{
Logger.Log(
LoggerLevel.Info,
LogId,
"RefreshIpAddresses",
"Initiated the automatic refresh of IP addresses of home stamp endpoints.");
if (globalEnvironments == null)
{
ResolveGlobalEnvironments();
}
foreach (var kvp in HostToIpUriMap)
{
var hostname = kvp.Key.Host;
var resolvedIp = await ResolveIp(hostname, throwOnFailure: false).ConfigureAwait(false);
if (!string.IsNullOrWhiteSpace(resolvedIp))
{
var existingIp = kvp.Value.Host;
if (resolvedIp != existingIp)
{
HostToIpUriMap.TryUpdate(kvp.Key, new Uri($"https://{resolvedIp}:{kvp.Key.Port}"), kvp.Value);
}
}
}
}
catch (Exception e)
{
Logger.Log(LoggerLevel.Error, LogId, "RefreshIpAddresses", $"Hit exception: {e}");
}
finally
{
if (DnsResolutionUpdateFrequency == TimeSpan.Zero)
{
Logger.Log(LoggerLevel.Error, LogId, "RefreshIpAddresses", "Terminate the refresh task since the refresh frequency is 0");
}
else
{
TimerToRefreshIpAddress.Change(DnsResolutionUpdateFrequency, System.Threading.Timeout.InfiniteTimeSpan);
}
}
}
/// <summary>
/// Helper method that refreshes the map from account to home stamp URI when the endpoint is not explicitly set. It is invoked
/// periodically by the home stamp refresh timer, uses the unauthenticated HTTP client to retrieve the home stamp URIs from the
/// respective global environments, and logs (rather than rethrows) any exception it encounters while rebuilding the map.
/// </summary>
private static async void RefreshAccountHomeStamp()
{
if (globalEnvironments == null)
{
Logger.Log(LoggerLevel.Error, LogId, "RefreshAccountHomeStamp", "The global environments haven't been resolved yet.");
return;
}
for (int index = 0; index < NumberOfMdmGlobalEnvironments; index++)
{
try
{
await UpdateAccountToUriMapAsync(HttpClientWithoutAuthentication, globalEnvironments[index], GslbToUris[index], AccountToUriMaps[index]).ConfigureAwait(false);
}
catch (Exception e)
{
Logger.Log(
LoggerLevel.Error,
LogId,
"RefreshAccountHomeStamp",
$"The periodic background task that updates the account to URI map got an exception. Environment:{globalEnvironments[index]}, Exception:{e}");
}
}
if (HomeStampAutomaticUpdateFrequency == TimeSpan.Zero)
{
Logger.Log(LoggerLevel.Error, LogId, "RefreshAccountHomeStamp", "Terminate the home stamp refresh task since the refresh frequency is 0");
}
else
{
TimerToRefreshHomeStamp.Change(HomeStampAutomaticUpdateFrequency, System.Threading.Timeout.InfiniteTimeSpan);
}
}
/// <summary>
/// Helper method that updates the targeted map with the most recent information from the given global stamp.
/// </summary>
/// <param name="httpClient">The HTTP client helper to be used to communicate with the global stamp.</param>
/// <param name="globalEnvironmentUrl">The request URL to be used on the request to retrieve the accounts.</param>
/// <param name="gslbToUris">A dictionary that maps home gslb strings to Uri objects to avoid duplicate Uri objects.</param>
/// <param name="targetMap">The map to be updated with the results of the method run.</param>
/// <returns>
/// A task that can be used to synchronize the completion of the async method.
/// </returns>
private static async Task UpdateAccountToUriMapAsync(
HttpClient httpClient,
string globalEnvironmentUrl,
ConcurrentDictionary<string, Uri> gslbToUris,
ConcurrentDictionary<string, StampInformation> targetMap)
{
Logger.Log(
LoggerLevel.Info,
LogId,
"UpdateAccountToUriMapAsync",
"Initiated the automatic refresh of the account home stamp endpoints.");
foreach (var account in targetMap.Keys)
{
try
{
await GetAndUpdateStampInfoAsync(httpClient, globalEnvironmentUrl, gslbToUris, targetMap, account).ConfigureAwait(false);
}
catch (Exception e)
{
// Exceptions are handled per call so a single bad/corrupt account does not preclude the update of the others. In case of a lost
// connection this may log errors for several accounts, but that is preferable to letting a single account failure (think of
// a single account failing constantly) block the update of the others.
Logger.Log(
LoggerLevel.Error,
LogId,
"AccountToUriUpdate",
"The periodic background task that updates the account to URI map got an exception for account {0}. URL:{1}, Exception:{2}",
account,
globalEnvironmentUrl,
e);
}
}
}
/// <summary>
/// Helper method that gets the URI for a given account and updates the mapping from account to home stamp.
/// </summary>
/// <param name="httpClient">Helper object to be used in the operation.</param>
/// <param name="globalStampUrl">Global stamp to be targeted in the operation.</param>
/// <param name="gslbToUris">Stores all created home stamp URI objects in order to avoid duplications.</param>
/// <param name="targetMap">Map to be updated with the latest information retrived remotely.</param>
/// <param name="account">Account for which the URI should be retrieved.</param>
/// <returns>
/// The stamp information for the requested account.
/// </returns>
private static async Task<StampInformation> GetAndUpdateStampInfoAsync(HttpClient httpClient, string globalStampUrl, ConcurrentDictionary<string, Uri> gslbToUris, ConcurrentDictionary<string, StampInformation> targetMap, string account)
{
var requestUrl = $"{globalStampUrl}/public/monitoringAccount/{account}/homeStamp";
var response = await HttpClientHelper.GetResponse(new Uri(requestUrl), HttpMethod.Get, httpClient, null, null).ConfigureAwait(false);
var homeStampGslbHostname = JsonConvert.DeserializeObject<string>(response.Item1);
string queryEndpoint = null;
if (!string.IsNullOrEmpty(homeStampGslbHostname))
{
queryEndpoint = await SafeGetStampMetricDataQueryEndpointHostNameAsync(httpClient, "https://" + homeStampGslbHostname).ConfigureAwait(false);
}
Uri homeGslbUri;
var homeGslbKey = string.IsNullOrWhiteSpace(homeStampGslbHostname) ? string.Empty : homeStampGslbHostname;
if (!gslbToUris.TryGetValue(homeGslbKey, out homeGslbUri))
{
homeGslbUri = new Uri("https://" + homeStampGslbHostname);
gslbToUris.AddOrUpdate(homeGslbKey, homeGslbUri, (_, uri) => homeGslbUri);
}
Uri queryEndpointUri;
if (!string.IsNullOrEmpty(queryEndpoint))
{
queryEndpointUri = new Uri("https://" + queryEndpoint);
}
else
{
queryEndpointUri = homeGslbUri;
}
var stampInfo = new StampInformation(homeGslbUri, queryEndpointUri);
targetMap.AddOrUpdate(
account,
stampInfo,
(_, uri) => stampInfo);
return stampInfo;
}
/// <summary>
/// Helper method that gets the metric data query endpoint for the given mdm stamp.
/// Returns string.Empty if no separate query endpoint is configured.
/// </summary>
/// <param name="httpClient">Helper object to be used in the operation.</param>
/// <param name="stampUrl">Url of the mdm stamp for which query endpoint needs to be discovered.</param>
/// <returns>
/// The metric data query endpoint host name for the stamp, or string.Empty if none is configured.
/// </returns>
private static async Task<string> SafeGetStampMetricDataQueryEndpointHostNameAsync(HttpClient httpClient, string stampUrl)
{
try
{
string requestUrl = $"{stampUrl}/public/metricsDataQueryEndpointHostName";
Tuple<string, HttpResponseMessage> response = await HttpClientHelper.GetResponse(new Uri(requestUrl), HttpMethod.Get, httpClient, null, null).ConfigureAwait(false);
var queryEndpoint = JsonConvert.DeserializeObject<string>(response.Item1);
return queryEndpoint;
}
catch (Exception exp)
{
var httpException = exp as MetricsClientException;
if (httpException?.ResponseStatusCode == HttpStatusCode.NotFound)
{
Logger.Log(
LoggerLevel.Info,
LogId,
nameof(SafeGetStampMetricDataQueryEndpointHostNameAsync),
"Failed to resolve query endpoint for the mdm stamp due to query resolution endpoint not found, will use main endpoint.");
}
else
{
Logger.Log(
LoggerLevel.Warning,
LogId,
nameof(SafeGetStampMetricDataQueryEndpointHostNameAsync),
"Failed to resolve query endpoint for the mdm stamp due to unexpected exception, will use main endpoint.",
exp);
}
return string.Empty;
}
}
/// <summary>
/// Gets the endpoint for querying metrics data information of MDM monitoring account.
/// </summary>
/// <param name="monitoringAccount">Monitoring account for which we want to retrieve the endpoint.</param>
/// <returns>Returns the URI of the given account, or the fixed endpoint if one was specified at construction time.</returns>
private StampInformation GetAndUpdateIfRequiredStampInformation(string monitoringAccount)
{
StampInformation endpoint;
if (!AccountToUriMaps[this.mdmEnvironmentMapIndex].TryGetValue(monitoringAccount, out endpoint))
{
// In this case we need to block until operation is completed since this is the information requested by the user.
endpoint = GetAndUpdateStampInfoAsync(
HttpClientWithoutAuthentication,
ResolveGlobalEnvironments()[this.mdmEnvironmentMapIndex],
GslbToUris[this.mdmEnvironmentMapIndex],
AccountToUriMaps[this.mdmEnvironmentMapIndex],
monitoringAccount).GetAwaiter().GetResult();
}
return endpoint;
}
private struct StampInformation
{
public StampInformation(Uri stampMainUri, Uri stampQueryUri)
{
this.StampMainUri = stampMainUri;
this.StampQueryUri = stampQueryUri;
}
public Uri StampMainUri { get; }
public Uri StampQueryUri { get; }
}
}
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="Reducer.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Query
{
/// <summary>
/// Reducing function to use on time series data.
/// </summary>
public enum Reducer
{
/// <summary>
/// Reducer not specified. Query is not considered valid.
/// </summary>
Undefined,
/// <summary>
/// Not a reducer per se; it means that any value on the series should be evaluated by the <see cref="Operator"/>.
/// </summary>
Any,
/// <summary>
/// Not a reducer per se; it means that all values on the series should be evaluated by the <see cref="Operator"/>.
/// The evaluation is relaxed in the sense that missing data points (null) are going to be considered
/// as satisfying the <see cref="Operator"/> condition, but at least one data point should exist satisfying the <see cref="Operator"/>.
/// </summary>
All,
/// <summary>
/// Not a reducer per se; it means that all values on the series should be evaluated by the <see cref="Operator"/>.
/// The evaluation is strict in the sense that missing data points (null) are going to be considered
/// as not satisfying the <see cref="Operator"/> conditions.
/// </summary>
AllStrict,
/// <summary>
/// Reduce by calculating the average of all values in the series. Only data points other than
/// null are considered for the average calculation.
/// </summary>
Average,
/// <summary>
/// Reduce by adding all values in the series.
/// </summary>
Sum,
/// <summary>
/// Reduce by selecting the minimum value of the series.
/// </summary>
Min,
/// <summary>
/// Reduce by selecting the maximum value of the series.
/// </summary>
Max
}
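// Illustrative example (hypothetical series): for the series [2, null, 4] over the query window,
// Average yields 3 (nulls are ignored, as documented above), Sum adds the values, Min selects 2 and
// Max selects 4; Any, All and AllStrict are not aggregations but decide whether the individual points
// satisfy the configured Operator.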
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="PreaggregateFiltersManager.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.PreaggregateFiltersManagement
{
using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Threading.Tasks;
using MetricEnrichmentRuleManagement;
using Newtonsoft.Json;
using Utility;
/// <summary>
/// This class manages get and save operations on preaggregate filters.
/// </summary>
public sealed class PreaggregateFiltersManager
{
private readonly ConnectionInfo connectionInfo;
private readonly HttpClient httpClient;
private readonly string configurationUrlPrefix;
/// <summary>
/// Initializes a new instance of the <see cref="PreaggregateFiltersManager"/> class.
/// </summary>
/// <param name="connectionInfo">The connection information for the MDM endpoint being used.</param>
public PreaggregateFiltersManager(ConnectionInfo connectionInfo)
{
if (connectionInfo == null)
{
throw new ArgumentNullException(nameof(connectionInfo));
}
this.connectionInfo = connectionInfo;
this.configurationUrlPrefix = this.connectionInfo.GetAuthRelativeUrl("v1/config/preaggregate/dimensionfilters/");
this.httpClient = HttpClientHelper.CreateHttpClientWithAuthInfo(connectionInfo);
}
/// <summary>
/// Gets all filters satisfying the given constraints.
/// </summary>
/// <param name="monitoringAccount">The monitoring account.</param>
/// <param name="metricNamespace">The metric namespace.</param>
/// <param name="metricName">Name of the metric.</param>
/// <param name="preaggregateDimensionNames">The preaggregate dimension names.</param>
/// <param name="count">The count of filters requested. Use 0 to get all filters.</param>
/// <param name="pageOffset">The offset of the requested filters page calculated based on the count of data returned.</param>
/// <returns>The preaggregate filters matching the given constraints for the monitoring account.</returns>
public async Task<PreaggregateFilters> GetPreaggregateFiltersAsync(
string monitoringAccount,
string metricNamespace,
string metricName,
IEnumerable<string> preaggregateDimensionNames,
int count,
int pageOffset)
{
if (string.IsNullOrWhiteSpace(monitoringAccount))
{
throw new ArgumentNullException(nameof(monitoringAccount));
}
var arguments = new RawPreaggregateFilterQueryArguments(monitoringAccount, metricNamespace, metricName, preaggregateDimensionNames, count, pageOffset);
string serializedArguments = JsonConvert.SerializeObject(arguments);
string path = $"{this.configurationUrlPrefix}monitoringAccount/{monitoringAccount}/getfilters";
var uriBuilder = new UriBuilder(this.connectionInfo.GetEndpoint(monitoringAccount))
{
Path = path
};
var response = await HttpClientHelper.GetResponseAsStringAsync(
uriBuilder.Uri,
HttpMethod.Post,
this.httpClient,
monitoringAccount,
this.configurationUrlPrefix,
serializedContent: serializedArguments).ConfigureAwait(false);
var rules = JsonConvert.DeserializeObject<PreaggregateFilters>(response);
return rules;
}
/// <summary>
/// Adds the given pre-aggregate filters to storage for given monitoring account.
/// </summary>
/// <param name="monitoringAccount">Monitoring account for the enrichment rule.</param>
/// <param name="preaggregateFilters">Preaggregate filters which needs to be added to the storage.</param>
/// <returns>Task representing the operation.</returns>
public async Task AddPreaggregateFilters(string monitoringAccount, PreaggregateFilters preaggregateFilters)
{
if (string.IsNullOrEmpty(monitoringAccount))
{
throw new ArgumentNullException(nameof(monitoringAccount));
}
if (preaggregateFilters == null)
{
throw new ArgumentNullException(nameof(preaggregateFilters));
}
string path = $"{this.configurationUrlPrefix}monitoringAccount/{monitoringAccount}/addfilters";
var uriBuilder = new UriBuilder(this.connectionInfo.GetEndpoint(monitoringAccount))
{
Path = path,
Query = "apiVersion=1"
};
string serializedContent = JsonConvert.SerializeObject(preaggregateFilters);
await HttpClientHelper.GetResponseAsStringAsync(
uriBuilder.Uri,
HttpMethod.Post,
this.httpClient,
monitoringAccount,
this.configurationUrlPrefix,
serializedContent: serializedContent).ConfigureAwait(false);
}
/// <summary>
/// Removes the given pre-aggregate filters from storage for given monitoring account.
/// </summary>
/// <param name="monitoringAccount">Monitoring account for the enrichment rule.</param>
/// <param name="preaggregateFilters">Preaggregate filters which needs to be removed from the storage.</param>
/// <returns>Task representing the operation.</returns>
public async Task RemovePreaggregateFilters(string monitoringAccount, PreaggregateFilters preaggregateFilters)
{
if (string.IsNullOrEmpty(monitoringAccount))
{
throw new ArgumentNullException(nameof(monitoringAccount));
}
if (preaggregateFilters == null)
{
throw new ArgumentNullException(nameof(preaggregateFilters));
}
string path = $"{this.configurationUrlPrefix}monitoringAccount/{monitoringAccount}/removefilters";
var uriBuilder = new UriBuilder(this.connectionInfo.GetEndpoint(monitoringAccount))
{
Path = path,
Query = "apiVersion=1"
};
string serializedContent = JsonConvert.SerializeObject(preaggregateFilters);
await HttpClientHelper.GetResponseAsStringAsync(
uriBuilder.Uri,
HttpMethod.Delete,
this.httpClient,
monitoringAccount,
this.configurationUrlPrefix,
serializedContent: serializedContent).ConfigureAwait(false);
}
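// Example usage sketch (hypothetical account, namespace, metric and dimension names):
//
// var manager = new PreaggregateFiltersManager(connectionInfo);
// PreaggregateFilters filters = await manager.GetPreaggregateFiltersAsync(
//     "MyMonitoringAccount", "MyNamespace", "MyMetric", new[] { "Datacenter" }, count: 0, pageOffset: 0);
// await manager.AddPreaggregateFilters("MyMonitoringAccount", filters);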
}
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="IFilteredTimeSeries.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Query
{
using System.Collections.Generic;
using Metrics;
using Microsoft.Online.Metrics.Serialization.Configuration;
/// <summary>
/// An interface for filtered time series.
/// </summary>
public interface IFilteredTimeSeries
{
/// <summary>
/// Gets the metric identifier.
/// </summary>
MetricIdentifier MetricIdentifier { get; }
/// <summary>
/// Gets the set of valid dimension name-value pairs that meet the query condition.
/// </summary>
IReadOnlyList<KeyValuePair<string, string>> DimensionList { get; }
/// <summary>
/// Gets the evaluated value for this time series that meets the condition set in the query (provided for evidence and/or sorting).
/// </summary>
double EvaluatedResult { get; }
/// <summary>
/// Gets the full collection time series values for the query interval. It should be null if
/// the query did not request the full collection of values to be returned.
/// </summary>
/// <remarks>double.NaN is the sentinel used to indicate there is no metric value.</remarks>
IReadOnlyList<KeyValuePair<SamplingType, double[]>> TimeSeriesValues { get; }
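// Consumption sketch (hypothetical caller): double.NaN marks missing points, so callers typically skip them, e.g.
// foreach (var pair in series.TimeSeriesValues) { var realPoints = pair.Value.Where(v => !double.IsNaN(v)); }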
}
}<file_sep>// -----------------------------------------------------------------------
// <copyright file="IMonitorConfigurationManager.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// -----------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Monitors
{
using System.Collections.Generic;
using System.Threading.Tasks;
using Configuration;
/// <summary>
/// The interface of managing monitor configuration.
/// </summary>
internal interface IMonitorConfigurationManager
{
/// <summary>
/// Sync all monitor configuration across mirror accounts.
/// </summary>
/// <param name="monitoringAccount">Monitoring account.</param>
/// <param name="skipVersionCheck">True if skip version check.</param>
/// <param name="validate">Whether to validate the monitor configuration in target accounts.</param>
/// <returns>A list of configuration update result.</returns>
Task<IReadOnlyList<ConfigurationUpdateResultList>> SyncConfigurationAsync(
IMonitoringAccount monitoringAccount,
bool skipVersionCheck = false,
bool validate = true);
/// <summary>
/// Sync all monitor configuration under specific namespace across mirror accounts.
/// </summary>
/// <param name="monitoringAccount">Monitoring account.</param>
/// <param name="metricNamespace">Metric namespace.</param>
/// <param name="skipVersionCheck">True if skip version check.</param>
/// <param name="validate">Whether to validate the monitor configuration in target accounts.</param>
/// <returns>A list of configuration update result.</returns>
Task<IReadOnlyList<ConfigurationUpdateResultList>> SyncConfigurationAsync(
IMonitoringAccount monitoringAccount,
string metricNamespace,
bool skipVersionCheck = false,
bool validate = true);
/// <summary>
/// Sync monitor configuration under specific metric across mirror accounts.
/// </summary>
/// <param name="monitoringAccount">Monitoring account.</param>
/// <param name="metricNamespace">Metric namespace.</param>
/// <param name="metricName">Metric name.</param>
/// <param name="skipVersionCheck">True if skip version check.</param>
/// <param name="validate">Whether to validate the monitor configuration in target accounts.</param>
/// <returns>A list of configuration update result.</returns>
Task<ConfigurationUpdateResultList> SyncConfigurationAsync(
IMonitoringAccount monitoringAccount,
string metricNamespace,
string metricName,
bool skipVersionCheck = false,
bool validate = true);
}
}<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="FilteredQueryResponseDeserializer.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Utility
{
using System;
using System.Collections.Generic;
using System.IO;
using Online.Metrics.Serialization;
using Query;
/// <summary>
/// The class to deserialize the binary payload in response.
/// </summary>
public sealed class FilteredQueryResponseDeserializer
{
/// <summary>
/// Deserializes the specified stream.
/// </summary>
/// <param name="stream">The stream to deserialize.</param>
/// <returns>
/// A list of <see cref="FilteredTimeSeriesQueryResponse"/>.
/// </returns>
public static IReadOnlyList<IFilteredTimeSeriesQueryResponse> Deserialize(Stream stream)
{
using (var reader = new BinaryReader(stream))
{
var numOfResponses = (int)SerializationUtils.ReadUInt32FromBase128(reader);
var results = new List<FilteredTimeSeriesQueryResponse>(numOfResponses);
for (int i = 0; i < numOfResponses; i++)
{
// Strip off a version added by query service host - QueryCoordinatorHost.cs.
var version = reader.ReadByte();
var numOfQueryResults = reader.ReadInt32();
for (int k = 0; k < numOfQueryResults; k++)
{
var filteredTimeSeriesQueryResponse = new FilteredTimeSeriesQueryResponse();
filteredTimeSeriesQueryResponse.Deserialize(reader);
results.Add(filteredTimeSeriesQueryResponse);
}
}
return results;
}
}
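// Example usage sketch (hypothetical caller that already has an HttpResponseMessage named 'httpResponse'):
//
// using (Stream stream = await httpResponse.Content.ReadAsStreamAsync().ConfigureAwait(false))
// {
//     IReadOnlyList<IFilteredTimeSeriesQueryResponse> responses = FilteredQueryResponseDeserializer.Deserialize(stream);
// }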
}
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="FilteredTimeSeriesQueryResponse.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Query
{
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using System.Runtime.CompilerServices;
using Metrics;
using Microsoft.Cloud.Metrics.Client.Configuration;
using Newtonsoft.Json;
using Online.Metrics.Serialization;
using Online.Metrics.Serialization.Configuration;
using Utility;
/// <summary>
/// TopN query response corresponding to <see cref="FilteredTimeSeriesQueryRequest" />.
/// </summary>
public sealed class FilteredTimeSeriesQueryResponse : IFilteredTimeSeriesQueryResponse
{
/// <summary>
/// The version to indicate complete failure of a <see cref="FilteredTimeSeriesQueryResponse"/>.
/// </summary>
public const byte VersionToIndicateCompleteFailure = byte.MaxValue;
/// <summary>
/// The current stable version.
/// </summary>
public const byte CurrentVersion = 3;
/// <summary>
/// The next version to be supported in the future, if different from <see cref="CurrentVersion"/>.
/// </summary>
/// <remarks>
/// Put version change history here.
/// The initial version starts with 1.
/// Version 2: Serializer and deserializer can process query messages in query result.
/// Version 3: Handle more than 1 time series metadata as required in the support of multi-account query.
/// </remarks>
public const byte NextVersion = 3;
/// <summary>
/// Initializes a new instance of the <see cref="FilteredTimeSeriesQueryResponse"/> class.
/// </summary>
public FilteredTimeSeriesQueryResponse()
{
}
/// <summary>
/// Initializes a new instance of the <see cref="FilteredTimeSeriesQueryResponse" /> class.
/// </summary>
/// <param name="startTime">Start time</param>
/// <param name="endTime">End time</param>
/// <param name="timeResolutionInMinutes">Time resolution in mins</param>
/// <param name="filteredTimeSeriesList">The time series returned</param>
/// <param name="errorMessage">The error message.</param>
internal FilteredTimeSeriesQueryResponse(
DateTime startTime,
DateTime endTime,
int timeResolutionInMinutes,
IReadOnlyList<FilteredTimeSeries> filteredTimeSeriesList,
string errorMessage = null)
{
this.StartTimeUtc = startTime;
this.EndTimeUtc = endTime;
this.TimeResolutionInMinutes = timeResolutionInMinutes;
this.FilteredTimeSeriesList = filteredTimeSeriesList;
this.DiagnosticInfo = new DiagnosticInfo { ErrorMessage = errorMessage };
}
/// <summary>
/// Gets the query request.
/// </summary>
public FilteredTimeSeriesQueryRequest QueryRequest { get; private set; }
/// <summary>
/// Gets the end time in UTC for the query results.
/// </summary>
public DateTime EndTimeUtc { get; private set; }
/// <summary>
/// Gets the start time in UTC for the query results.
/// </summary>
public DateTime StartTimeUtc { get; private set; }
/// <summary>
/// Gets the time resolution in milliseconds for the query results.
/// </summary>
public int TimeResolutionInMinutes { get; private set; }
/// <summary>
/// Gets the <see cref="FilteredTimeSeries"/> list. Each item represents a single time series whose start time, end time and time resolution
/// are given by this object's members.
/// </summary>
public IReadOnlyList<IFilteredTimeSeries> FilteredTimeSeriesList { get; private set; }
/// <summary>
/// Gets the error code.
/// </summary>
public FilteredTimeSeriesQueryResponseErrorCode ErrorCode { get; private set; } = FilteredTimeSeriesQueryResponseErrorCode.Success;
/// <summary>
/// Gets the diagnostics information.
/// </summary>
public DiagnosticInfo DiagnosticInfo { get; private set; }
/// <summary>
/// Deserializes to populate this object.
/// </summary>
/// <param name="reader">The reader.</param>
public void Deserialize(BinaryReader reader)
{
byte version;
uint resultTimeSeriesCount;
Dictionary<int, string> stringTable;
long stringTableLengthInByte;
SeriesMetadata seriesMetadata;
Dictionary<int, SeriesMetadata> metadataTable;
long metadataTableLengthInByte;
if (!this.ReadPreamble(
reader,
out version,
out resultTimeSeriesCount,
out stringTable,
out stringTableLengthInByte,
out seriesMetadata,
out metadataTable,
out metadataTableLengthInByte))
{
return;
}
var filteredTimeSeriesList = new FilteredTimeSeries[resultTimeSeriesCount];
for (int i = 0; i < resultTimeSeriesCount; i++)
{
filteredTimeSeriesList[i] = ReadTimeSeries(version, reader, seriesMetadata, stringTable, metadataTable);
}
this.FilteredTimeSeriesList = filteredTimeSeriesList;
// we have read the string table and metadata tables, so just skip them to move to the correct position.
reader.BaseStream.Position += metadataTableLengthInByte + stringTableLengthInByte;
// additional query messages in version 2 and above
// For now, we just read query messages. Further decisions need to be made on how to store messages
if (version > 1)
{
int messageCount = reader.ReadByte();
for (int i = 0; i < messageCount; i++)
{
int topic = reader.ReadByte();
int level = reader.ReadByte();
int source = reader.ReadByte();
string content = reader.ReadString();
}
}
}
/// <summary>
/// Deserializes to populate this object.
/// </summary>
/// <param name="reader">The reader.</param>
/// <returns>The IEnumerable with FilteredTimeSeries.</returns>
public IEnumerable<FilteredTimeSeries> ReadFilteredTimeSeries(BinaryReader reader)
{
var numOfResponses = (int)SerializationUtils.ReadUInt32FromBase128(reader);
for (int j = 0; j < numOfResponses; j++)
{
// Strip off a version added by query service host - QueryCoordinatorHost.cs.
reader.ReadByte();
var numOfQueryResults = reader.ReadInt32();
for (int k = 0; k < numOfQueryResults; k++)
{
byte version;
uint resultTimeSeriesCount;
Dictionary<int, string> stringTable;
long stringTableLengthInByte;
SeriesMetadata seriesMetadata;
Dictionary<int, SeriesMetadata> metadataTable;
long metadataTableLengthInByte;
if (!this.ReadPreamble(
reader,
out version,
out resultTimeSeriesCount,
out stringTable,
out stringTableLengthInByte,
out seriesMetadata,
out metadataTable,
out metadataTableLengthInByte))
{
yield break;
}
for (int i = 0; i < resultTimeSeriesCount; i++)
{
yield return ReadTimeSeries(version, reader, seriesMetadata, stringTable, metadataTable);
}
}
}
}
private static SeriesMetadata DeserializeTimeSeriesMetadata(BinaryReader reader, Dictionary<int, string> stringTable)
{
var metricIdentifier = new MetricIdentifier(
DeserializeStringByIndex(reader, stringTable),
DeserializeStringByIndex(reader, stringTable),
DeserializeStringByIndex(reader, stringTable));
var dimensionsCount = reader.ReadByte();
var dimensionNames = new string[dimensionsCount];
for (var i = 0; i < dimensionsCount; i++)
{
dimensionNames[i] = DeserializeStringByIndex(reader, stringTable);
}
return new SeriesMetadata(metricIdentifier, dimensionNames);
}
private static FilteredTimeSeries ReadTimeSeries(
byte version,
BinaryReader reader,
SeriesMetadata seriesMetadata,
Dictionary<int, string> stringTable,
Dictionary<int, SeriesMetadata> metadataTable)
{
if (version >= 3)
{
var index = (int)SerializationUtils.ReadUInt32FromBase128(reader);
seriesMetadata = metadataTable[index];
}
var metricIdentifier = seriesMetadata.MetricIdentifier;
var dimensionNames = seriesMetadata.DimensionNames;
var dimensionsCount = dimensionNames.Length;
var dimensionList = new KeyValuePair<string, string>[dimensionsCount];
for (int k = 0; k < dimensionsCount; k++)
{
dimensionList[k] = new KeyValuePair<string, string>(dimensionNames[k], DeserializeStringByIndex(reader, stringTable));
}
var propertiesCount = reader.ReadByte();
var evaluatedValues = new KeyValuePair<string, double>[propertiesCount];
for (int k = 0; k < propertiesCount; k++)
{
evaluatedValues[k] = new KeyValuePair<string, double>(DeserializeStringByIndex(reader, stringTable), reader.ReadDouble());
}
var samplingTypesCount = reader.ReadByte();
var timeSeriesData = new KeyValuePair<SamplingType, double[]>[samplingTypesCount];
for (int k = 0; k < samplingTypesCount; k++)
{
var samplingTypeString = DeserializeStringByIndex(reader, stringTable);
var samplingType = SamplingType.BuiltInSamplingTypes.ContainsKey(samplingTypeString)
? SamplingType.BuiltInSamplingTypes[samplingTypeString]
: new SamplingType(samplingTypeString);
timeSeriesData[k] = new KeyValuePair<SamplingType, double[]>(samplingType, DoubleValueSerializer.Deserialize(reader));
}
double evaluatedResult = propertiesCount == 0 ? double.NaN : evaluatedValues[0].Value;
return new FilteredTimeSeries(metricIdentifier, dimensionList, evaluatedResult, timeSeriesData);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static string DeserializeStringByIndex(BinaryReader reader, Dictionary<int, string> stringTable)
{
var index = (int)SerializationUtils.ReadUInt32FromBase128(reader);
return stringTable[index];
}
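// Informational summary of the wire layout consumed by ReadPreamble below (derived from the reads in the method):
//   [version : byte]
//   version == VersionToIndicateCompleteFailure:
//     [error code : int16] [error message : string] [hasRequest : bool] [request JSON : string, only when hasRequest]
//   otherwise:
//     [hasDataQualityInfo : bool] [QueryResultQualityInfo, only when the flag is set]
//     [start time in minutes : base-128] [duration in minutes : base-128] [resolution in minutes : base-128]
//     [time series count : base-128]
//     [relative offset to string table : 8 bytes] ... [string table at that offset]
//     version >= 3: [relative offset to metadata table : 8 bytes] ... [metadata table at that offset]
//     version < 3 and count > 0: [single series metadata inline]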
private bool ReadPreamble(
BinaryReader reader,
out byte version,
out uint resultTimeSeriesCount,
out Dictionary<int, string> stringTable,
out long stringTableLengthInByte,
out SeriesMetadata seriesMetadata,
out Dictionary<int, SeriesMetadata> metadataTable,
out long metadataTableLengthInByte)
{
resultTimeSeriesCount = 0;
stringTable = null;
stringTableLengthInByte = 0;
seriesMetadata = null;
metadataTable = null;
metadataTableLengthInByte = 0;
version = reader.ReadByte();
if (version == 0)
{
throw new MetricsClientException(
$"The server didn't respond with the right version of serialization - the initial version is 1 but the server responds with version 0.");
}
this.DiagnosticInfo = new DiagnosticInfo();
if (version == VersionToIndicateCompleteFailure)
{
this.ErrorCode = (FilteredTimeSeriesQueryResponseErrorCode)reader.ReadInt16();
// The trace ID and others will be filled in by callers.
this.DiagnosticInfo.ErrorMessage = reader.ReadString();
bool returnRequestObjectOnFailure = reader.ReadBoolean();
if (returnRequestObjectOnFailure)
{
this.QueryRequest = JsonConvert.DeserializeObject<FilteredTimeSeriesQueryRequest>(reader.ReadString());
}
return false;
}
if (version > NextVersion)
{
throw new MetricsClientException(
$"The server didn't respond with the right version of serialization. CurrentVersion : {CurrentVersion}, NextVersion : {NextVersion}, Responded: {version}.");
}
var hasDataQualityInfo = reader.ReadBoolean();
if (hasDataQualityInfo)
{
var queryResultQualityInfo = new QueryResultQualityInfo();
queryResultQualityInfo.Deserialize(reader);
}
this.StartTimeUtc = new DateTime(
(long)SerializationUtils.ReadUInt64FromBase128(reader) * TimeSpan.TicksPerMinute,
DateTimeKind.Utc);
this.EndTimeUtc = this.StartTimeUtc.AddMinutes(SerializationUtils.ReadUInt32FromBase128(reader));
this.TimeResolutionInMinutes = (int)SerializationUtils.ReadUInt32FromBase128(reader);
resultTimeSeriesCount = SerializationUtils.ReadUInt32FromBase128(reader);
/* Read string table. */
var currentPosition = reader.BaseStream.Position;
var stringTableRelativePosition = reader.ReadUInt64();
reader.BaseStream.Position = currentPosition + (long)stringTableRelativePosition;
var stringTableStartPosition = reader.BaseStream.Position;
var stringTableSize = SerializationUtils.ReadUInt32FromBase128(reader);
stringTable = new Dictionary<int, string>((int)stringTableSize);
for (int i = 0; i < stringTableSize; i++)
{
stringTable.Add(i, reader.ReadString());
}
stringTableLengthInByte = reader.BaseStream.Position - stringTableStartPosition;
// + sizeof(ulong) since we just read the 8-byte stringTableRelativePosition above.
reader.BaseStream.Position = currentPosition + sizeof(ulong);
if (version >= 3)
{
/* Read metadata table */
currentPosition = reader.BaseStream.Position;
var metadataTableRelativePosition = reader.ReadUInt64();
reader.BaseStream.Position = currentPosition + (long)metadataTableRelativePosition;
var metadataTableStartPosition = reader.BaseStream.Position;
var metadataTableSize = SerializationUtils.ReadUInt32FromBase128(reader);
metadataTable = new Dictionary<int, SeriesMetadata>((int)metadataTableSize);
for (var i = 0; i < metadataTableSize; i++)
{
metadataTable.Add(i, DeserializeTimeSeriesMetadata(reader, stringTable));
}
metadataTableLengthInByte = reader.BaseStream.Position - metadataTableStartPosition;
// + sizeof(ulong) since we just read the 8-byte metadataTableRelativePosition above.
reader.BaseStream.Position = currentPosition + sizeof(ulong);
}
if (resultTimeSeriesCount > 0)
{
if (version < 3)
{
seriesMetadata = DeserializeTimeSeriesMetadata(reader, stringTable);
this.QueryRequest = new FilteredTimeSeriesQueryRequest(seriesMetadata.MetricIdentifier);
}
else
{
this.QueryRequest = new FilteredTimeSeriesQueryRequest(metadataTable.Values.First().MetricIdentifier);
}
}
return true;
}
private sealed class SeriesMetadata
{
public SeriesMetadata(MetricIdentifier metricIdentifier, string[] dimensionNames)
{
this.MetricIdentifier = metricIdentifier;
this.DimensionNames = dimensionNames;
}
public MetricIdentifier MetricIdentifier { get; }
public string[] DimensionNames { get; }
}
}
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="QueryResultQualityInfo.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Query
{
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
/// <summary>
/// Represents the query result quality information.
/// </summary>
/// <remarks>
/// TODO: Add time based data loss support.
/// </remarks>
public sealed class QueryResultQualityInfo
{
private readonly ConcurrentDictionary<string, int> droppedTimeSeries = new ConcurrentDictionary<string, int>();
private int totalDroppedTimeSeries;
private int totalEstimatedTimeSeries;
/// <summary>
/// Gets or sets the total estimated time series for the query.
/// </summary>
public int TotalEstimatedTimeSeries
{
get { return this.totalEstimatedTimeSeries; }
set { this.totalEstimatedTimeSeries = value; }
}
/// <summary>
/// Gets the total dropped time series for the query.
/// </summary>
public int TotalDroppedTimeSeries
{
get { return this.totalDroppedTimeSeries; }
}
/// <summary>
/// Gets the total evaluated time series for the query. This represents the number of raw series the query
/// service processed to obtain the customer-facing result.
/// </summary>
public int TotalEvaluatedTimeSeries
{
get { return this.TotalEstimatedTimeSeries - this.totalDroppedTimeSeries; }
}
/// <summary>
/// Gets the dropped time series reasons.
/// </summary>
/// <remarks>
/// Used for unit testing purposes.
/// </remarks>
/// <returns>Dropped time series reasons.</returns>
public ICollection<string> GetDroppedTimeSeriesReasons()
{
return this.droppedTimeSeries.Keys;
}
/// <summary>
/// Gets the dropped time series by reason.
/// </summary>
/// <param name="reason">The reason.</param>
/// <remarks>
/// Used for unit testing purposes.
/// </remarks>
/// <returns>Dropped time series by reason.</returns>
public int GetDroppedTimeSeriesByReason(string reason)
{
return this.droppedTimeSeries[reason];
}
/// <summary>
/// Registers dropped time series during the query.
/// </summary>
/// <param name="reason">The reason for dropping the time series.</param>
/// <param name="count">The count.</param>
public void RegisterDroppedTimeSeries(string reason, int count)
{
if (count > 0)
{
Interlocked.Add(ref this.totalDroppedTimeSeries, count);
this.droppedTimeSeries.AddOrUpdate(
reason,
count,
(key, existingValue) => existingValue + count);
}
}
/// <summary>
/// Registers the estimated time series during a query. This is the
/// number of time series we would ideally get back from the store.
/// </summary>
/// <param name="count">The count.</param>
public void RegisterEstimatedTimeSeries(int count)
{
Interlocked.Add(ref this.totalEstimatedTimeSeries, count);
}
/// <summary>
/// Returns a <see cref="string" /> that represents this instance.
/// </summary>
/// <returns>
/// A <see cref="string" /> that represents this instance.
/// </returns>
public override string ToString()
{
string result = $"Total Estimated TimeSeries:{this.TotalEstimatedTimeSeries}, Total Dropped TimeSeries:{this.totalDroppedTimeSeries}.";
if (this.totalDroppedTimeSeries > 0)
{
result = this.droppedTimeSeries.Aggregate(result, (current, dropReason) => current + $"{dropReason.Key}:{dropReason.Value}");
}
return result;
}
/// <summary>
/// Deserializes query quality info from given stream reader.
/// </summary>
/// <param name="reader">The stream reader containing quality information.</param>
public void Deserialize(BinaryReader reader)
{
// Any modifications here should be replicated to D:\onebranch\EngSys\MDA\MetricsAndHealth\src\DistributedQuery\Interfaces\QueryResultQuality.cs
reader.ReadByte();
this.TotalEstimatedTimeSeries = reader.ReadInt32();
this.totalDroppedTimeSeries = reader.ReadInt32();
if (this.totalDroppedTimeSeries > 0)
{
var numberOfReasons = reader.ReadInt32();
for (int i = 0; i < numberOfReasons; i++)
{
var reason = reader.ReadString();
var dropCount = reader.ReadInt32();
this.RegisterDroppedTimeSeries(reason, dropCount);
}
}
}
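// Informational note on the serialized layout handled by Deserialize above and Serialize below:
//   [version : byte] [total estimated : int32] [total dropped : int32]
//   when total dropped > 0: [reason count : int32] followed by [reason : string] [count : int32] pairs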
/// <summary>
/// Serializes the quality info into given stream writer.
/// </summary>
/// <param name="writer">The stream writer.</param>
public void Serialize(BinaryWriter writer)
{
// Any modifications here should be replicated to D:\onebranch\EngSys\MDA\MetricsAndHealth\src\DistributedQuery\Interfaces\QueryResultQuality.cs
// Version
writer.Write((byte)0);
writer.Write(this.TotalEstimatedTimeSeries);
writer.Write(this.totalDroppedTimeSeries);
if (this.totalDroppedTimeSeries > 0)
{
writer.Write(this.droppedTimeSeries.Count);
foreach (var dropReason in this.droppedTimeSeries)
{
writer.Write(dropReason.Key);
writer.Write(dropReason.Value);
}
}
}
/// <summary>
/// Aggregates the data from the given quality info.
/// </summary>
/// <param name="source">The source quality info.</param>
public void Aggregate(QueryResultQualityInfo source)
{
if (source.totalDroppedTimeSeries > 0)
{
foreach (var reason in source.droppedTimeSeries)
{
this.RegisterDroppedTimeSeries(reason.Key, reason.Value);
}
}
}
}
}
<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="IMonitoringAccountConfigurationManager.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Configuration
{
using System.Collections.Generic;
using System.Threading.Tasks;
/// <summary>
/// The interface that manages monitoring account configurations.
/// </summary>
public interface IMonitoringAccountConfigurationManager
{
/// <summary>
/// Get the monitoring account specified by the monitoring account name.
/// </summary>
/// <param name="monitoringAccountName">The name of the monitoring account.</param>
/// <returns>The monitoring account.</returns>
Task<IMonitoringAccount> GetAsync(string monitoringAccountName);
/// <summary>
/// Creates a monitoring account with provided configuration.
/// </summary>
/// <param name="monitoringAccount">The monitoring account configuration.</param>
/// <param name="stampHostName">The stamp name such as prod3.metrics.nsatc.net as documented @ https://aka.ms/mdm-endpoints.</param>
/// <returns>A task the caller can wait on.</returns>
Task CreateAsync(IMonitoringAccount monitoringAccount, string stampHostName);
/// <summary>
/// Create a new monitoring account named <paramref name="newMonitoringAccountName"/> on stamp <paramref name="stampHostName"/> by copying the common settings from <paramref name="monitoringAccountToCopyFrom" />.
/// </summary>
/// <param name="newMonitoringAccountName">The new monitoring account name.</param>
/// <param name="monitoringAccountToCopyFrom">The name of the monitoring account where common settings are copied from.</param>
/// <param name="stampHostName">The stamp name such as prod3.metrics.nsatc.net as documented @ https://aka.ms/mdm-endpoints.</param>
/// <returns>A task the caller can wait on.</returns>
Task CreateAsync(string newMonitoringAccountName, string monitoringAccountToCopyFrom, string stampHostName);
/// <summary>
/// Save the monitoring account configuration provided.
/// </summary>
/// <param name="monitoringAccount">The monitoring account configuration to save.</param>
/// <param name="skipVersionCheck">Flag indicating whether or not the version flag should be honored.</param>
/// <returns>A task the caller can wait on.</returns>
Task SaveAsync(IMonitoringAccount monitoringAccount, bool skipVersionCheck = false);
/// <summary>
/// Delete the monitoring account.
/// </summary>
/// <param name="monitoringAccount">The monitoring account configuration to delete.</param>
/// <returns>A task the caller can wait on.</returns>
Task DeleteAsync(string monitoringAccount);
/// <summary>
/// Un-Delete the monitoring account.
/// </summary>
/// <param name="monitoringAccount">The monitoring account configuration to un-delete.</param>
/// <returns>A task the caller can wait on.</returns>
Task UnDeleteAsync(string monitoringAccount);
/// <summary>
/// Synchronizes the monitoring account configuration asynchronously.
/// </summary>
/// <param name="monitoringAccount">The monitoring account.</param>
/// <param name="skipVersionCheck">if set to <c>true</c> [skip version check].</param>
/// <returns>A list of <see cref="ConfigurationUpdateResult"/>.</returns>
Task<IReadOnlyList<ConfigurationUpdateResult>> SyncMonitoringAccountConfigurationAsync(
IMonitoringAccount monitoringAccount,
bool skipVersionCheck = false);
}
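// Illustrative usage sketch (informational only; how the implementation is obtained and the
// account name "MyMonitoringAccount" are assumptions, not part of this interface):
//
//   IMonitoringAccountConfigurationManager manager = /* obtain an implementation */;
//   IMonitoringAccount account = await manager.GetAsync("MyMonitoringAccount");
//   // ... adjust the configuration ...
//   await manager.SaveAsync(account);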
}<file_sep>// <copyright file="ConfigurationUpdateResultList.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
namespace Microsoft.Cloud.Metrics.Client.Configuration
{
using System.Collections.Generic;
/// <summary>
/// Stores list of configuration update result for a configuration.
/// </summary>
public sealed class ConfigurationUpdateResultList
{
/// <summary>
/// Gets or sets the monitoring account.
/// </summary>
public string MonitoringAccount { get; set; }
/// <summary>
/// Gets or sets the metric namespace.
/// </summary>
public string MetricNamespace { get; set; }
/// <summary>
/// Gets or sets the name of the metric.
/// </summary>
public string MetricName { get; set; }
/// <summary>
/// Gets or sets a value indicating whether this <see cref="ConfigurationUpdateResultList"/> is success.
/// </summary>
public bool Success { get; set; }
/// <summary>
/// Gets or sets the exception message if the update is not successful.
/// </summary>
public string ExceptionMessage { get; set; }
/// <summary>
/// Gets or sets the configuration update results.
/// </summary>
public IReadOnlyList<IConfigurationUpdateResult> ConfigurationUpdateResults { get; set; }
}
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="EtwTraceLevel.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// <summary>
// Enumeration with the ETW event levels.
// </summary>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Metrics.Etw
{
using System.Diagnostics.CodeAnalysis;
// ReSharper disable UnusedMember.Global
/// <summary>
/// Enumerates the ETW trace levels used by most providers.
/// </summary>
[SuppressMessage("Microsoft.Design", "CA1028:EnumStorageShouldBeInt32", Justification = "Needs to have the same size as the native equivalent")]
internal enum EtwTraceLevel : byte
{
/// <summary>
/// Always log.
/// </summary>
LogAlways = 0x0,
/// <summary>
/// Critical logging only.
/// </summary>
Critical = 0x1,
/// <summary>
/// Logging errors.
/// </summary>
Error = 0x2,
/// <summary>
/// Logging warnings.
/// </summary>
Warning = 0x3,
/// <summary>
/// Informational logging.
/// </summary>
Informational = 0x4,
/// <summary>
/// Verbose logging.
/// </summary>
Verbose = 0x5
}
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="RawListener.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// <summary>
// Wraps the win32 calls to create an ETW listener that allows the client to receive the buffer and event callbacks.
// </summary>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Metrics.Etw
{
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Globalization;
using System.Linq;
/// <summary>
/// Type that wraps the win32 calls to create an ETW listener that allows the client to receive
/// the raw buffer and event callbacks when processing a trace (real-time or file).
/// </summary>
internal sealed unsafe class RawListener : IDisposable
{
/// <summary>
/// .NET 3.5 does not have Environment.Is64BitProcess, so sizeof(IntPtr) is used as a workaround
/// to determine whether this is a 64-bit process.
/// </summary>
private static readonly bool Is64BitProcess = sizeof(IntPtr) == 8;
/// <summary>
/// Win32 opaque trace handles passed to the APIs to refer to the trace sessions being listened.
/// </summary>
/// <remarks>
/// Stored as an array because that is how the handles are passed to the ProcessTrace API.
/// </remarks>
private readonly ulong[] traceHandles;
/// <summary>
/// Keeps the trace logs structures on memory while processing the traces.
/// </summary>
/// <remarks>
/// Need to suppress ReSharper here because this struct ensures that the ETW callbacks
/// are kept alive.
/// </remarks>
// ReSharper disable once PrivateFieldCanBeConvertedToLocalVariable
private readonly NativeMethods.EventTraceLogfilew[] traceLogs;
/// <summary>
/// Initializes a new instance of the <see cref="RawListener"/> class.
/// </summary>
/// <param name="sessionOrEtlFiles">
/// An enumerable with a single item if listen to an ETW real-time session or a sequence of ETL file names
/// that are going to be processed together.
/// </param>
/// <param name="eventRecordCallback">
/// The event record callback provided by the client. This can be null if the
/// eventBufferCallback parameter is not.
/// </param>
/// <param name="eventBufferCallback">
/// The event buffer callback provided by the client. This can be null if the
/// eventRecordCallback parameter is not.
/// </param>
/// <param name="isFileTrace">
/// Boolean to indicate whether this is a file or real-time ETW listener.
/// </param>
/// <param name="useRawTimestamps">
/// True to indicate that the timestamps of events will be raw or false if ETW should convert them to FILETIME.
/// </param>
private RawListener(
IEnumerable<string> sessionOrEtlFiles,
NativeMethods.EventRecordCallback eventRecordCallback,
NativeMethods.EventTraceBufferCallback eventBufferCallback,
bool isFileTrace,
bool useRawTimestamps = true)
{
if (sessionOrEtlFiles == null)
{
throw new ArgumentNullException("sessionOrEtlFiles");
}
var tracesEnum = from trace in sessionOrEtlFiles
where !string.IsNullOrEmpty(trace)
select trace;
var traces = tracesEnum.ToList();
if (traces.Count < 1)
{
throw new ArgumentException(
"At least one non-null, non-empty session or etl file name must be provided");
}
if (eventRecordCallback == null && eventBufferCallback == null)
{
throw new ArgumentException("At least one of the callbacks must be specified");
}
var traceIndex = 0;
this.traceHandles = new ulong[traces.Count];
this.traceLogs = new NativeMethods.EventTraceLogfilew[traces.Count];
foreach (var traceName in traces)
{
this.traceLogs[traceIndex].EventCallback = eventRecordCallback;
this.traceLogs[traceIndex].BufferCallback = eventBufferCallback;
this.traceLogs[traceIndex].LogFileMode = NativeMethods.ProcessTraceModeEventRecord
| (useRawTimestamps ? NativeMethods.ProcessTraceModeRawTimestamp : 0);
if (isFileTrace)
{
this.traceLogs[traceIndex].LogFileName = traceName;
}
else
{
this.traceLogs[traceIndex].LoggerName = traceName;
this.traceLogs[traceIndex].LogFileMode |= NativeMethods.ProcessTraceModeRealTime;
}
// New ETW session mode for Windows 8.1, Server 2012 R2, and later that avoids
// slow consumers on one session to affect other sessions. Adding by default to all
// sessions since it does not have adverse impact and it is just ignored by legacy OSes.
this.traceLogs[traceIndex].LogFileMode |= NativeMethods.EventTraceIndependentSessionMode;
var traceHandle = NativeMethods.OpenTrace(ref this.traceLogs[traceIndex]);
if ((!Is64BitProcess && traceHandle == NativeMethods.InvalidTracehandle32) ||
(Is64BitProcess && traceHandle == NativeMethods.InvalidTracehandle64))
{
throw new Win32Exception(string.Format(
CultureInfo.InvariantCulture,
"OpenTrace call for trace '{0}' failed.",
traceName));
}
this.traceHandles[traceIndex++] = traceHandle;
}
}
/// <summary>
/// Creates a new instance of the <see cref="RawListener"/> class to listen to an
/// ETW real-time session.
/// </summary>
/// <param name="sessionName">
/// Name of the ETW real-time session from which the client wants to listen for events.
/// </param>
/// <param name="eventRecordCallback">
/// The event record callback provided by the client. This can be null if the
/// eventBufferCallback parameter is not.
/// </param>
/// <param name="eventBufferCallback">
/// The event buffer callback provided by the client. This can be null if the
/// eventRecordCallback parameter is not.
/// </param>
/// <param name="useRawTimestamps">
/// True to indicate that the timestamps of events will be raw or false if ETW should convert them to FILETIME.
/// </param>
/// <returns>
/// The new instance of the <see cref="RawListener"/> class to listen to an
/// ETW real-time session.
/// </returns>
public static RawListener CreateRealTimeListener(
string sessionName,
NativeMethods.EventRecordCallback eventRecordCallback,
NativeMethods.EventTraceBufferCallback eventBufferCallback,
bool useRawTimestamps = true)
{
if (string.IsNullOrEmpty(sessionName))
{
throw new ArgumentException("Session name cannot be null or empty", "sessionName");
}
return new RawListener(new[] { sessionName }, eventRecordCallback, eventBufferCallback, false, useRawTimestamps);
}
/// <summary>
/// Creates a new instance of the <see cref="RawListener"/> class to process a set of ETL files.
/// </summary>
/// <param name="etlFiles">
/// Sequence with the names of the ETL files from which the client wants to listen for events.
/// </param>
/// <param name="eventRecordCallback">
/// The event record callback provided by the client. This can be null if the
/// eventBufferCallback parameter is not.
/// </param>
/// <param name="eventBufferCallback">
/// The event buffer callback provided by the client. This can be null if the
/// eventRecordCallback parameter is not.
/// </param>
/// <param name="useRawTimestamps">
/// True to indicate that the timestamps of events will be raw or false if ETW should convert them to FILETIME.
/// </param>
/// <returns>
/// The new instance of the <see cref="RawListener"/> class to process a set of ETL files.
/// </returns>
public static RawListener CreateEtlFileListener(
IEnumerable<string> etlFiles,
NativeMethods.EventRecordCallback eventRecordCallback,
NativeMethods.EventTraceBufferCallback eventBufferCallback,
bool useRawTimestamps = true)
{
if (etlFiles == null)
{
throw new ArgumentNullException("etlFiles");
}
var nonEmptyEtlsEnum = from etl in etlFiles
where !string.IsNullOrEmpty(etl)
select etl;
var nonEmptyEtls = nonEmptyEtlsEnum.ToList();
if (nonEmptyEtls.Count < 1)
{
throw new ArgumentException(
"At least one non-null and non-empty etl file name must be provided");
}
return new RawListener(nonEmptyEtls, eventRecordCallback, eventBufferCallback, true, useRawTimestamps);
}
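// Illustrative usage sketch (informational only; "session.etl" and "myEventRecordCallback" are
// assumed caller-supplied values, not members of this class):
//
//   using (var listener = RawListener.CreateEtlFileListener(
//       new[] { "session.etl" }, myEventRecordCallback, eventBufferCallback: null))
//   {
//       listener.Process(); // blocks until the ETL files are fully processed
//   }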
/// <summary>
/// Starts to listen for the events, this will trigger the callbacks passed on the
/// constructor to be called.
/// </summary>
public void Process()
{
var error = NativeMethods.ProcessTrace(
this.traceHandles,
(uint)this.traceHandles.Length,
IntPtr.Zero,
IntPtr.Zero);
if (error != NativeMethods.ErrorSuccess)
{
throw new Win32Exception(error);
}
}
/// <summary>
/// Disposes the associated native resources.
/// </summary>
public void Dispose()
{
foreach (var handle in this.traceHandles)
{
NativeMethods.CloseTrace(handle);
}
}
}
}
<file_sep>// -----------------------------------------------------------------------
// <copyright file="SerializationUtils.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// -----------------------------------------------------------------------
namespace Microsoft.Online.Metrics.Serialization
{
using System;
using System.Collections.Generic;
using System.IO;
using System.Runtime.CompilerServices;
/// <summary>
/// Set of functions used for metrics serialization and deserialization.
/// </summary>
public static class SerializationUtils
{
/// <summary>
/// A string which replaces empty dimension value.
/// </summary>
public const string EmptyDimensionValueString = "__Empty";
/// <summary>
/// One minute size in 100th nanoseconds.
/// </summary>
public const long OneMinuteInterval = 600000000;
/// <summary>
/// Writes UInt16 value to the stream.
/// </summary>
/// <param name="stream"><see cref="Stream"/> to write to.</param>
/// <param name="value">Value to write.</param>
public static void Write(Stream stream, ushort value)
{
stream.WriteByte((byte)(value & 0xFF));
stream.WriteByte((byte)((value & 0xFF00) >> 8));
}
/// <summary>
/// Writes int value Base-128 encoded.
/// </summary>
/// <param name="writer">Binary writer to be used.</param>
/// <param name="value">Value to write.</param>
public static void WriteInt32AsBase128(BinaryWriter writer, int value)
{
WriteInt64AsBase128(writer, value);
}
/// <summary>
/// Writes long value Base-128 encoded.
/// </summary>
/// <param name="writer">Binary writer to be used.</param>
/// <param name="value">Value to write.</param>
public static void WriteInt64AsBase128(BinaryWriter writer, long value)
{
var negative = value < 0;
var t = negative ? -value : value;
var first = true;
do
{
byte b;
if (first)
{
b = (byte)(t & 0x3f);
t >>= 6;
if (negative)
{
b = (byte)(b | 0x40);
}
first = false;
}
else
{
b = (byte)(t & 0x7f);
t >>= 7;
}
if (t > 0)
{
b |= 0x80;
}
writer.Write(b);
}
while (t > 0);
}
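// Worked example (informational): WriteInt64AsBase128 encodes -300 as two bytes.
//   First byte : magnitude & 0x3F = 0x2C, sign bit 0x40 set, continuation bit 0x80 set -> 0xEC
//   Second byte: magnitude >> 6   = 0x04, no continuation                              -> 0x04
//   Stream bytes: 0xEC 0x04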
/// <summary>
/// Writes uint value Base-128 encoded.
/// </summary>
/// <param name="writer">Binary writer to be used.</param>
/// <param name="value">Value to write.</param>
public static void WriteUInt32AsBase128(BinaryWriter writer, uint value)
{
WriteUInt64AsBase128(writer, value);
}
/// <summary>
/// Writes uint value as 4 bytes, but Base-128 encoded.
/// Used for back compatibility between some versions of serializer.
/// </summary>
/// <param name="writer">Binary writer to be used.</param>
/// <param name="value">Value to write.</param>
public static void WriteUInt32InBase128AsFixed4Bytes(BinaryWriter writer, uint value)
{
var count = 0;
ulong t = value;
do
{
var b = (byte)(t & 0x7f);
t >>= 7;
++count;
if (t > 0 || count < 4)
{
b |= 0x80;
}
writer.Write(b);
}
while (t > 0);
for (; ++count <= 4;)
{
writer.Write(count == 4 ? (byte)0x0 : (byte)0x80);
}
}
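// Worked example (informational): WriteUInt32InBase128AsFixed4Bytes pads the value 1 to exactly four bytes.
//   Stream bytes: 0x81 0x80 0x80 0x00 (payload 1 with a forced continuation bit, then padding)
//   A standard base-128 reader decodes this back to 1.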
/// <summary>
/// Writes ulong value Base-128 encoded.
/// </summary>
/// <param name="writer">Binary writer to be used.</param>
/// <param name="value">Value to write.</param>
public static void WriteUInt64AsBase128(BinaryWriter writer, ulong value)
{
var t = value;
do
{
var b = (byte)(t & 0x7f);
t >>= 7;
if (t > 0)
{
b |= 0x80;
}
writer.Write(b);
}
while (t > 0);
}
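// Worked example (informational): WriteUInt64AsBase128 encodes 300 as two bytes.
//   First byte : 300 & 0x7F = 0x2C, continuation bit set -> 0xAC
//   Second byte: 300 >> 7   = 0x02, no continuation      -> 0x02
//   Stream bytes: 0xAC 0x02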
/// <summary>
/// Writes ulong value Base-128 encoded to the buffer starting from the specified offset.
/// </summary>
/// <param name="buffer">Buffer used for writing.</param>
/// <param name="offset">Offset to start with. Will be moved to the next byte after written.</param>
/// <param name="value">Value to write.</param>
public static void WriteUInt64AsBase128(byte[] buffer, ref int offset, ulong value)
{
var t = value;
do
{
var b = (byte)(t & 0x7f);
t >>= 7;
if (t > 0)
{
b |= 0x80;
}
buffer[offset++] = b;
}
while (t > 0);
}
/// <summary>
/// Writes a histogram.
/// </summary>
/// <param name="writer">Binary writer to be used.</param>
/// <param name="bucketsCount">The number of buckets in histogram (must be equal to the number of elements in buckets collection).</param>
/// <param name="buckets">Histogram buckets to write.</param>
/// <param name="hasHistogramSizePrefix">A flag indicating whether histogram size (in bytes)
/// should be put as a 4-bytes prefix in the beginning of the histogram data.</param>
public static void WriteHistogramDataHistogram(BinaryWriter writer, int bucketsCount, IEnumerable<KeyValuePair<ulong, uint>> buckets, bool hasHistogramSizePrefix = false)
{
var startPosition = writer.BaseStream.Position;
if (hasHistogramSizePrefix)
{
writer.Write(0);
}
WriteUInt32AsBase128(writer, (uint)bucketsCount);
int bucketsWritten = 0;
ulong prevKey = 0;
uint prevValue = 0;
var firstTime = true;
foreach (var bucket in buckets)
{
if (firstTime)
{
prevKey = bucket.Key;
prevValue = bucket.Value;
WriteUInt64AsBase128(writer, prevKey);
WriteUInt32AsBase128(writer, prevValue);
firstTime = false;
}
else
{
WriteUInt64AsBase128(writer, bucket.Key - prevKey);
WriteInt32AsBase128(writer, (int)bucket.Value - (int)prevValue);
prevKey = bucket.Key;
prevValue = bucket.Value;
}
++bucketsWritten;
}
if (bucketsCount != bucketsWritten)
{
throw new ArgumentException(
$"The actual number of buckets in the {nameof(buckets)} was {bucketsWritten}, the passed {nameof(bucketsCount)} is {bucketsCount}",
nameof(bucketsCount));
}
if (hasHistogramSizePrefix)
{
var endPosition = writer.BaseStream.Position;
writer.BaseStream.Position = startPosition;
writer.Write((int)(endPosition - startPosition - sizeof(int)));
writer.BaseStream.Position = endPosition;
}
}
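// Worked example (informational): buckets {5:2, 7:3, 10:1} without the size prefix.
//   Bucket count 3                       -> 0x03
//   First bucket: key 5, value 2         -> 0x05 0x02
//   Second: key delta +2, value delta +1 -> 0x02 0x01
//   Third : key delta +3, value delta -2 -> 0x03 0x42 (signed base-128)
//   Stream bytes: 0x03 0x05 0x02 0x02 0x01 0x03 0x42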
/// <summary>
/// Writes collection of <see cref="HyperLogLogSketch"/> objects to a <see cref="BinaryWriter"/>.
/// </summary>
/// <param name="writer"><see cref="BinaryWriter"/> object used for writing.</param>
/// <param name="count">Number of sketches to write.</param>
/// <param name="data">Collection of sketch dimension - sketch value pairs.</param>
public static void WriteHyperLogLogSketches(BinaryWriter writer, uint count, IEnumerable<KeyValuePair<string, HyperLogLogSketch>> data)
{
var startPosition = writer.BaseStream.Position;
writer.Write(0);
WriteUInt32AsBase128(writer, count);
foreach (var sketch in data)
{
writer.Write(sketch.Key);
writer.Write(sketch.Value.BValue);
writer.Write(sketch.Value.Registers, 0, sketch.Value.Registers.Length);
}
var endPosition = writer.BaseStream.Position;
writer.BaseStream.Position = startPosition;
writer.Write((int)(endPosition - startPosition - sizeof(int)));
writer.BaseStream.Position = endPosition;
}
/// <summary>
/// Writes collection of <see cref="HyperLogLogSketch"/> objects to a <see cref="BinaryWriter"/>.
/// </summary>
/// <param name="writer"><see cref="BinaryWriter"/> object used for writing.</param>
/// <param name="count">Number of sketches to write.</param>
/// <param name="data">Collection of sketch dimension - sketch value pairs.</param>
/// <remarks>
/// Will replace the V1 format once the deserialization code reaches everywhere.
///
/// HLL data is serialized in two formats:
/// Sparse HLL data format
/// Version = 2 as byte
/// BValue as byte
/// For all non-zero elements :
/// Position In Sketch(ushort), Value at that position(byte)
/// Full HLL data format
/// Version = 1 as byte
/// BValue as byte
/// For all elements :
/// Value at that position(byte)
/// </remarks>
public static void WriteHyperLogLogSketchesV2(BinaryWriter writer, uint count, IEnumerable<KeyValuePair<string, HyperLogLogSketch>> data)
{
var startPosition = writer.BaseStream.Position;
writer.Write(0);
WriteUInt32AsBase128(writer, count);
foreach (var sketch in data)
{
writer.Write(sketch.Key);
ushort nonZeroElements = 0;
for (int i = 0; i < sketch.Value.Registers.Length; i++)
{
if (sketch.Value.Registers[i] > 0)
{
nonZeroElements++;
}
}
// Position is 2 bytes, value is 1 byte, length of nonzero bytes is 2 bytes
if ((nonZeroElements * 2) + nonZeroElements + 2 < sketch.Value.Registers.Length)
{
// Sparse HLL
writer.Write((byte)2);
writer.Write(sketch.Value.BValue);
writer.Write(nonZeroElements);
for (ushort i = 0; i < sketch.Value.Registers.Length; i++)
{
if (sketch.Value.Registers[i] > 0)
{
writer.Write(i);
writer.Write(sketch.Value.Registers[i]);
}
}
}
else
{
// Non Sparse HLL
writer.Write((byte)1);
writer.Write(sketch.Value.BValue);
writer.Write(sketch.Value.Registers, 0, sketch.Value.Registers.Length);
}
}
var endPosition = writer.BaseStream.Position;
writer.BaseStream.Position = startPosition;
writer.Write((int)(endPosition - startPosition - sizeof(int)));
writer.BaseStream.Position = endPosition;
}
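// Informational note: with bValue = 10 the sketch has 1024 registers, so the sparse format
// (2 bytes per position + 1 byte per value + 2 bytes for the count) is chosen only while
// fewer than 341 registers are non-zero; otherwise the full register array is written.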
/// <summary>
/// Estimates a size in bytes of the uint value when written in Base-128.
/// </summary>
/// <param name="value">Value which size has to be estimated.</param>
/// <returns>The estimated size in bytes.</returns>
public static long EstimateUInt32InBase128Size(uint value)
{
// Base-128 uses 7 payload bits per byte: 1 byte up to 0x7F, 2 up to 0x3FFF, 3 up to 0x1FFFFF, 4 up to 0xFFFFFFF, otherwise 5.
return value <= 0x7F ? 1 : (value <= 0x3FFF ? 2 : (value <= 0x1FFFFF ? 3 : (value <= 0xFFFFFFF ? 4 : 5)));
}
/// <summary>
/// Reads int value stored in Base-128 encoding.
/// </summary>
/// <param name="reader">Binary reader to be used for reading.</param>
/// <returns>Read value.</returns>
public static int ReadInt32FromBase128(BinaryReader reader)
{
return (int)ReadInt64FromBase128(reader);
}
/// <summary>
/// Reads long value stored in Base-128 encoding.
/// </summary>
/// <param name="reader">Binary reader to be used for reading.</param>
/// <returns>Read value.</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static long ReadInt64FromBase128(BinaryReader reader)
{
int dummy = 0;
return ReadInt64FromBase128(reader, ref dummy);
}
/// <summary>
/// Reads long value stored in Base-128 encoding.
/// </summary>
/// <param name="reader">Binary reader to be used for reading.</param>
/// <param name="bytesRead">The number that is incremented each time a single byte is read.</param>
/// <returns>Read value.</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static long ReadInt64FromBase128(BinaryReader reader, ref int bytesRead)
{
long val = 0;
var shift = 0;
byte b;
var first = true;
var negative = false;
do
{
if (first)
{
first = false;
b = reader.ReadByte();
bytesRead++;
val += (b & 0x3f) << shift;
negative = (b & 0x40) != 0;
shift += 6;
}
else
{
b = reader.ReadByte();
bytesRead++;
val += (long)(b & 0x7f) << shift;
shift += 7;
}
}
while ((b & 0x80) != 0);
return negative ? -val : val;
}
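// Worked example (informational): decoding the bytes 0xEC 0x04 produced by WriteInt64AsBase128.
//   0xEC: low 6 bits = 0x2C (44), sign bit (0x40) set, continuation bit (0x80) set
//   0x04: contributes 4 << 6 = 256, no continuation
//   Result: -(256 + 44) = -300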
/// <summary>
/// Reads uint value stored in Base-128 encoding.
/// </summary>
/// <param name="reader">Binary reader to be used for reading.</param>
/// <returns>Read value.</returns>
public static uint ReadUInt32FromBase128(BinaryReader reader)
{
return (uint)ReadUInt64FromBase128(reader);
}
/// <summary>
/// Reads uint value stored in Base-128 encoding.
/// </summary>
/// <param name="buffer">Buffer from which value to be read.</param>
/// <param name="offset">Offset in buffer to start reading from.</param>
/// <returns>Read value.</returns>
public static uint ReadUInt32FromBase128(byte[] buffer, ref int offset)
{
return (uint)ReadUInt64FromBase128(buffer, ref offset);
}
/// <summary>
/// Reads ulong value stored in Base-128 encoding.
/// </summary>
/// <param name="reader">Binary reader to be used for reading.</param>
/// <returns>Read value.</returns>
public static ulong ReadUInt64FromBase128(BinaryReader reader)
{
ulong val = 0;
var shift = 0;
byte b;
do
{
b = reader.ReadByte();
val = val + ((ulong)(b & 0x7f) << shift);
shift += 7;
}
while ((b & 0x80) != 0);
return val;
}
/// <summary>
/// Reads ulong value stored in Base-128 encoding.
/// </summary>
/// <param name="buffer">Buffer from which value to be read.</param>
/// <param name="offset">Offset in buffer to start reading from.</param>
/// <returns>Read value.</returns>
public static ulong ReadUInt64FromBase128(byte[] buffer, ref int offset)
{
ulong val = 0;
var shift = 0;
byte b;
do
{
b = buffer[offset++];
val = val + ((ulong)(b & 0x7f) << shift);
shift += 7;
}
while ((b & 0x80) != 0);
return val;
}
/// <summary>
/// Reads a histogram.
/// </summary>
/// <param name="reader">Binary reader to be used for reading.</param>
/// <param name="hasHistogramSizePrefix">A flag indicating whether histogram
/// data contains histogram size (in bytes) as a 4-bytes prefix.</param>
/// <returns>An enumerable of key-value pairs where the key is a sample value
/// and the value is the number of times the sample value appeared.</returns>
public static IEnumerable<KeyValuePair<ulong, uint>> ReadHistogram(BinaryReader reader, bool hasHistogramSizePrefix = false)
{
if (hasHistogramSizePrefix)
{
// Not used now, but left for optimization later
reader.ReadInt32();
}
var size = ReadUInt32FromBase128(reader);
ulong prevKey = 0;
uint prevValue = 0;
for (var i = 0; i < size; ++i)
{
if (i == 0)
{
prevKey = ReadUInt64FromBase128(reader);
prevValue = ReadUInt32FromBase128(reader);
}
else
{
prevKey += ReadUInt64FromBase128(reader);
prevValue = (uint)(prevValue + ReadInt32FromBase128(reader));
}
yield return new KeyValuePair<ulong, uint>(prevKey, prevValue);
}
}
/// <summary>
/// Reads a histogram to a list.
/// </summary>
/// <param name="list">List to append to</param>
/// <param name="reader">Binary reader to be used for reading.</param>
/// <param name="hasHistogramSizePrefix">A flag indicating whether histogram
/// data contains histogram size (in bytes) as a 4-bytes prefix.</param>
public static void ReadHistogramTo(List<KeyValuePair<ulong, uint>> list, BinaryReader reader, bool hasHistogramSizePrefix = false)
{
list.Clear();
if (hasHistogramSizePrefix)
{
// Not used now, but left for optimization later
reader.ReadInt32();
}
var size = ReadUInt32FromBase128(reader);
ulong prevKey = 0;
uint prevValue = 0;
list.EnsureSpace((int)size, geometryGrowth: true);
for (var i = 0; i < size; ++i)
{
if (i == 0)
{
prevKey = ReadUInt64FromBase128(reader);
prevValue = ReadUInt32FromBase128(reader);
}
else
{
prevKey += ReadUInt64FromBase128(reader);
prevValue = (uint)(prevValue + ReadInt32FromBase128(reader));
}
list.Add(new KeyValuePair<ulong, uint>(prevKey, prevValue));
}
}
/// <summary>
/// Reads the hyperloglog sketches from the given reader.
/// </summary>
/// <param name="reader">Stream reader containing the sketches data.</param>
/// <param name="length">Length of data to read.</param>
/// <param name="sketchConstructor">Function used to construct <see cref="HyperLogLogSketch"/> object.</param>
/// <returns>An <see cref="IEnumerable{T}"/> of dimension name and hyperloglog sketch key value pair.</returns>
public static IEnumerable<KeyValuePair<string, HyperLogLogSketch>> ReadHyperLogLogSketch(BinaryReader reader, int length, Func<int, HyperLogLogSketch> sketchConstructor)
{
var size = ReadUInt32FromBase128(reader);
for (var i = 0; i < size; ++i)
{
var dimensionName = reader.ReadString();
short bValue;
var sketch = ReadHyperLogLogSketch(reader, sketchConstructor, out bValue);
yield return new KeyValuePair<string, HyperLogLogSketch>(dimensionName, sketch);
}
}
/// <summary>
/// Reads the hyperloglog sketch.
/// </summary>
/// <param name="reader">The reader.</param>
/// <param name="sketchConstructor">The sketch constructor.</param>
/// <param name="bValue">B value.</param>
/// <returns>Deserialized sketch.</returns>
/// <remarks>
/// HLL data is serialized in three formats:
/// Legacy HLL data format
/// BValue as byte
/// For all elements :
/// Value at that position(byte)
/// Sparse HLL data format
/// Version = 2 as byte
/// BValue as byte
/// For all non-zero elements :
/// Position In Sketch(ushort), Value at that position(byte)
/// Full HLL data format
/// Version = 1 as byte
/// BValue as byte
/// For all elements :
/// Value at that position(byte)
/// </remarks>
public static HyperLogLogSketch ReadHyperLogLogSketch(BinaryReader reader, Func<int, HyperLogLogSketch> sketchConstructor, out short bValue)
{
var version = (short)reader.ReadByte();
if (version > 3)
{
// Legacy format when no version was present
bValue = version;
version = 0;
}
else
{
// All other formats has bValue as next element
bValue = reader.ReadByte();
}
var sketch = sketchConstructor(bValue);
if (version <= 1)
{
// Non-Sparse HLL
var registersSize = 1 << bValue;
for (var j = 0; j < registersSize; j++)
{
sketch[j] = reader.ReadByte();
}
}
else if (version == 2)
{
// Sparse HLL, hence the format is (position of non-zero item, value of non-zero item) pairs
var lengthOfNonZeroElements = reader.ReadUInt16();
for (var pos = 0; pos < lengthOfNonZeroElements; pos++)
{
var itemPosition = reader.ReadUInt16();
sketch[itemPosition] = reader.ReadByte();
}
}
return sketch;
}
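// Informational note: the legacy format carries no version byte, so the first byte is the bValue
// itself; the method above treats any first byte greater than 3 as such a legacy bValue, while a
// first byte of 1 or 2 selects the full or sparse versioned formats respectively.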
/// <summary>
/// Reads data from source stream and writes it to destination stream.
/// </summary>
/// <param name="source">Source stream.</param>
/// <param name="destination">Destination stream.</param>
/// <param name="len">Length to copy.</param>
/// <param name="tempBuffer">Temp buffer.</param>
public static void ReadFromStream(Stream source, Stream destination, int len, byte[] tempBuffer)
{
var readBytes = 0;
while (readBytes < len)
{
var bytesToRead = Math.Min(tempBuffer.Length, len - readBytes);
// Stream.Read may return fewer bytes than requested; copy only what was actually read.
var bytesRead = source.Read(tempBuffer, 0, bytesToRead);
if (bytesRead == 0)
{
throw new EndOfStreamException("Unexpected end of source stream.");
}
destination.Write(tempBuffer, 0, bytesRead);
readBytes += bytesRead;
}
}
/// <summary>
/// Ensure List has enough capacity for new addition to reduce resizing
/// </summary>
/// <typeparam name="T">Generic type</typeparam>
/// <param name="list">Existing list</param>
/// <param name="addCount">Estimated number of items to add</param>
/// <param name="geometryGrowth">True for geometry growth (50% more, O(N) algorithm),
/// False for no extra capacity (could lead to O(N^2) algorithm, use when memory is critical and this is not called often.).</param>
private static void EnsureSpace<T>(this List<T> list, int addCount, bool geometryGrowth = true)
{
if (addCount > 0)
{
int cap = list.Capacity;
if (cap == 0)
{
// New list, use toAdd
cap = addCount;
}
else
{
int newCount = list.Count + addCount;
// List has enough capacity
if (newCount <= cap)
{
return;
}
if (geometryGrowth)
{
// Geometric growth (add 50%)
cap = Math.Max(newCount, cap * 3 / 2);
}
else
{
cap = newCount;
}
}
list.Capacity = cap;
}
}
}
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="QueryLanguageResponseToDatatable.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Metrics
{
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Text;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
/// <summary>
/// Class which helps in converting query language response from FE to Datatable.
/// </summary>
internal static class QueryLanguageResponseToDatatable
{
/// <summary>
/// Converts the query language response to datatable.
/// Input looks as follows:
/// {
/// "queryRequest": {
/// "metricIdentifier": {
/// "monitoringAccount": "MetricTeamInternalMetrics",
/// "metricNamespace": "Metrics.Server",
/// "metricName": "ClientAggregatedMetricCount"
/// },
/// "startTimeUtc": "0001-01-01T00:00:00.0000000Z",
/// "endTimeUtc": "0001-01-01T00:00:00.0000000Z",
/// "seriesResolutionInMinutes": 0,
/// "aggregationType": 0,
/// "numberOfResultsToReturn": 0,
/// "orderBy": 0,
/// "zeroAsNoValueSentinel": false,
/// "aggregateAcrossAccounts": false,
/// "lastValueMode": false
/// },
/// "endTimeUtc": "2019-01-29T06:48:00.0000000Z",
/// "startTimeUtc": "2019-01-29T05:48:00.0000000Z",
/// "timeResolutionInMinutes": 1,
/// "filteredTimeSeriesList": {
/// "$type": "Microsoft.Cloud.Metrics.Client.Query.FilteredTimeSeries[], Microsoft.Cloud.Metrics.Client",
/// "$values": [
/// {
/// "metricIdentifier": {
/// "monitoringAccount": "MetricTeamInternalMetrics",
/// "metricNamespace": "Metrics.Server",
/// "metricName": "ClientAggregatedMetricCount"
/// },
/// "dimensionList": {
/// "$type": "System.Collections.Generic.KeyValuePair`2[[System.String, mscorlib],[System.String, mscorlib]][], mscorlib",
/// "$values": [{"key":"datacenter","value":"mdmtest1-black"}]
/// },
/// "evaluatedResult": "NaN",
/// "timeSeriesValues": {
/// "$type": "System.Collections.Generic.KeyValuePair`2[[Microsoft.Cloud.Metrics.Client.Metrics.SamplingType, Microsoft.Cloud.Metrics.Client],[System.Double[], mscorlib]][], mscorlib",
/// "$values": [
/// {
/// "key": {
/// "name": "Average"
/// },
/// "value": [
/// 265.10236098167132
/// ]
/// }
/// ]
/// }
/// }
/// ]
/// },
/// "errorCode": 0,
/// "diagnosticInfo": {}
/// }
///
/// Output looks as follows:
/// [
/// {
/// "TimestampUtc": "02/04/2019 07:11:00",
/// "AccountName": "MetricTeamInternalMetrics",
/// "MetricNamespace": "Metrics.Server",
/// "MetricName": "ClientAggregatedMetricCount",
/// "Datacenter": "EastUS2",
/// "Average": 70.083851254134714
/// },
/// {
/// "TimestampUtc": "02/04/2019 07:12:00",
/// "AccountName": "MetricTeamInternalMetrics",
/// "MetricNamespace": "Metrics.Server",
/// "MetricName": "ClientAggregatedMetricCount",
/// "Datacenter": "EastUS2",
/// "Average": 67.305346411549351
/// }
/// ]
/// </summary>
/// <param name="responseFromMetrics">Input data stream to convert to datatable.</param>
/// <returns>
/// Input data stream converted to datatable.
/// </returns>
public static JArray GetResponseAsTable(Stream responseFromMetrics)
{
using (var reader = new StreamReader(responseFromMetrics, Encoding.UTF8))
{
var jObject = (JObject)JsonSerializer.Create().Deserialize(reader, typeof(JObject));
JArray returnObject = JArray.Parse("[]");
// Parse as UTC so the emitted TimestampUtc values remain in UTC.
var startTimeUtc = DateTime.Parse(jObject["startTimeUtc"].Value<string>(), CultureInfo.InvariantCulture, DateTimeStyles.AdjustToUniversal);
var timeResolution = TimeSpan.FromMinutes(jObject["timeResolutionInMinutes"].Value<int>());
var outerValues = jObject["filteredTimeSeriesList"]["$values"] as JArray;
Dictionary<DateTime, JObject> rowMap = new Dictionary<DateTime, JObject>();
foreach (var outerValue in outerValues)
{
var innerValues = outerValue["timeSeriesValues"]["$values"] as JArray;
string accountName = outerValue["metricIdentifier"]["monitoringAccount"].Value<string>();
string metricNamespace = outerValue["metricIdentifier"]["metricNamespace"].Value<string>();
string metricName = outerValue["metricIdentifier"]["metricName"].Value<string>();
var dimensions = outerValue["dimensionList"]["$values"] as JArray;
rowMap.Clear();
foreach (var actualValue in innerValues)
{
var samplingType = actualValue["key"]["name"].Value<string>();
var dataPoints = actualValue["value"] as JArray;
var currentTimeStamp = startTimeUtc;
foreach (var val in dataPoints)
{
JObject row;
if (rowMap.ContainsKey(currentTimeStamp))
{
row = rowMap[currentTimeStamp];
}
else
{
row = JObject.Parse("{}");
row.Add("TimestampUtc", currentTimeStamp.ToString(DateTimeFormatInfo.InvariantInfo));
row.Add("i_AccountName", accountName);
row.Add("i_MetricNamespace", metricNamespace);
row.Add("i_MetricName", metricName);
foreach (var dim in dimensions)
{
row.Add(dim["key"].Value<string>(), dim["value"].Value<string>());
}
rowMap[currentTimeStamp] = row;
returnObject.Add(row);
}
var rowDimValue = val.Value<double>();
if (double.IsNaN(rowDimValue))
{
rowDimValue = 0;
}
row.Add(samplingType, rowDimValue);
currentTimeStamp = currentTimeStamp.Add(timeResolution);
}
}
}
return returnObject;
}
}
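// Illustrative usage sketch (informational only; "responseStream" is an assumed stream containing
// JSON shaped like the input shown in the summary above):
//
//   JArray table = QueryLanguageResponseToDatatable.GetResponseAsTable(responseStream);
//   foreach (JObject row in table)
//   {
//       Console.WriteLine(row["TimestampUtc"]);
//   }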
}
}<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="MetricsServerRelativeUris.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client
{
/// <summary>
/// The metrics server relative URIs.
/// </summary>
public static class MetricsServerRelativeUris
{
/// <summary>
/// The relative URL for metrics data
/// </summary>
public const string DataRelativeUrl = "v1/data/metrics";
/// <summary>
/// The relative URL for metrics meta-data a.k.a. hinting data
/// </summary>
public const string MetaDataRelativeUrl = "v1/hint";
/// <summary>
/// The relative URL for metrics meta-data a.k.a. hinting data V2.
/// </summary>
public const string MetaDataRelativeUrlV2 = "v2/hint";
/// <summary>
/// The relative URL for metrics configuration
/// </summary>
public const string ConfigRelativeUrl = "v1/config/metrics";
/// <summary>
/// The V2 relative URL for metrics configuration
/// </summary>
public const string ConfigRelativeUrlV2 = "v2/config/metrics";
/// <summary>
/// The relative URL for account configuration
/// </summary>
public const string TenantConfigRelativeUrl = "v1/config";
/// <summary>
/// The relative URL for health configuration.
/// </summary>
public const string HealthConfigRelativeUrl = "v2/config/health";
/// <summary>
/// The relative URL for monitoring account configuration
/// </summary>
public const string AccountConfigRelativeUrl = "v1/config/tenant";
/// <summary>
/// The relative URL for health controller
/// </summary>
public const string HealthRelativeUrl = "v3/data/health";
/// <summary>
/// The relative Url for distributed query.
/// </summary>
public const string DistributedQueryRelativeUrl = "flight/dq/batchedReadv3";
/// <summary>
/// The query service relative URL.
/// </summary>
public const string QueryServiceRelativeUrl = "query";
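// Illustrative example (informational only; the stamp host below is the one mentioned in the
// configuration manager documentation elsewhere in this client, and the https scheme is an assumption):
//   new Uri(new Uri("https://prod3.metrics.nsatc.net/"), MetricsServerRelativeUris.DataRelativeUrl)
//   // yields https://prod3.metrics.nsatc.net/v1/data/metrics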
}
}<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="ILocalAggregatedMetric.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Metrics
{
using System;
using System.Collections.Generic;
/// <summary>
/// The interface representing the locally aggregated metric in the ETW stream.
/// </summary>
public interface ILocalAggregatedMetric
{
/// <summary>
/// Gets the Monitoring Account to which this metric is reported.
/// </summary>
string MonitoringAccount { get; }
/// <summary>
/// Gets the metric namespace.
/// </summary>
string MetricNamespace { get; }
/// <summary>
/// Gets the metric name.
/// </summary>
string MetricName { get; }
/// <summary>
/// Gets the time in UTC when metric was reported.
/// </summary>
DateTime MetricTimeUtc { get; }
/// <summary>
/// Gets the dimension name-value dictionary.
/// </summary>
/// <remarks>The dimension names are case insensitive.</remarks>
IReadOnlyDictionary<string, string> Dimensions { get; }
/// <summary>
/// Gets the scaling factor applied to metric values.
/// </summary>
float ScalingFactor { get; }
/// <summary>
/// Gets the number of samples for which this metric is reported.
/// </summary>
uint Count { get; }
/// <summary>
        /// Gets the scaled sum of sample values reported for this metric.
/// </summary>
float ScaledSum { get; }
/// <summary>
        /// Gets the scaled minimum value of samples reported for this metric.
/// </summary>
float ScaledMin { get; }
/// <summary>
        /// Gets the scaled maximum value of samples reported for this metric.
/// </summary>
float ScaledMax { get; }
/// <summary>
        /// Gets the sum of sample values reported for this metric.
/// </summary>
ulong Sum { get; }
/// <summary>
        /// Gets the minimum value of samples reported for this metric.
/// </summary>
ulong Min { get; }
/// <summary>
        /// Gets the maximum value of samples reported for this metric.
/// </summary>
ulong Max { get; }
}
}<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="IEtlDispatcher.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// <summary>
// Defines the dispatchers of ETL files being periodically generated.
// </summary>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Metrics.Etw
{
using System.Collections.Generic;
/// <summary>
/// Defines the dispatchers of ETL files being periodically generated.
/// </summary>
/// <remarks>
/// The dispatcher will receive the ETL files to be dispatched to the ETL file subscribers of
/// a specific ETW collection.
/// </remarks>
internal interface IEtlDispatcher
{
/// <summary>
        /// Receives a list with all available ETL files that could be submitted to
/// the subscriber. The dispatcher should expect this list to be sorted from
/// oldest to newest available ETL.
/// </summary>
/// <param name="availableEtlFiles">
        /// Enumeration with all the available ETL files. The dispatcher will do some filtering
        /// before submitting them to the subscriber.
/// </param>
void EnqueueBacklogEtls(List<string> availableEtlFiles);
/// <summary>
/// Enqueues an ETL file for processing.
/// </summary>
/// <param name="etlFileName">
/// The ETL file to be enqueued for processing.
/// </param>
void EnqueueEtlFile(string etlFileName);
}
}<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="IReadOnlyTDigest.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Online.Metrics.Serialization
{
using System.IO;
/// <summary>
/// Read-only tDigest interface
/// </summary>
public interface IReadOnlyTDigest
{
/// <summary>
/// Serialize a tdigest to a given writer
/// </summary>
/// <param name="writer">The writer to use</param>
void Serialize(BinaryWriter writer);
}
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="KqlMRequest.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Query
{
using System;
using Newtonsoft.Json;
/// <summary>
/// The query request used when the customer queries with a free text query in the KQL-M query language.
/// </summary>
internal sealed class KqlMRequest
{
/// <summary>
/// Initializes a new instance of the <see cref="KqlMRequest"/> class.
/// Used for recreation with deserialization.
/// </summary>
public KqlMRequest()
{
}
/// <summary>
/// Initializes a new instance of the <see cref="KqlMRequest"/> class.
/// </summary>
/// <param name="monitoringAccount">The monitoring account.</param>
/// <param name="metricNamespace">The metric namespace.</param>
/// <param name="metric">The metric name.</param>
/// <param name="startTimeUtc">The start time of the query.</param>
/// <param name="endTimeUtc">The end time of the query.</param>
/// <param name="queryExpression">The query expression to execute.</param>
[JsonConstructor]
public KqlMRequest(
string monitoringAccount,
string metricNamespace,
string metric,
DateTime startTimeUtc,
DateTime endTimeUtc,
string queryExpression)
{
this.MonitoringAccount = monitoringAccount;
this.MetricNamespace = metricNamespace;
this.Metric = metric;
this.StartTimeUtc = startTimeUtc;
this.EndTimeUtc = endTimeUtc;
this.QueryExpression = queryExpression;
}
/// <summary>
/// The monitoring account in the context of the request.
/// </summary>
public string MonitoringAccount { get; private set; }
/// <summary>
/// The metric namespace in the context of the request.
/// </summary>
public string MetricNamespace { get; private set; }
/// <summary>
/// The name of the metric in the context of the request.
/// </summary>
public string Metric { get; private set; }
/// <summary>
/// The start time of the query in UTC time.
/// </summary>
public DateTime StartTimeUtc { get; private set; }
/// <summary>
/// The end time of the query in UTC time.
/// </summary>
public DateTime EndTimeUtc { get; private set; }
/// <summary>
/// The query expression to execute.
/// </summary>
public string QueryExpression { get; private set; }
}
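    // Illustrative sketch, not part of the original source: building a request covering the last hour
    // and serializing it with Json.NET. The account, namespace, metric and query text below are
    // hypothetical placeholders rather than real values.
    //   var request = new KqlMRequest(
    //       "MyMonitoringAccount",
    //       "MyNamespace",
    //       "RequestCount",
    //       DateTime.UtcNow.AddHours(-1),
    //       DateTime.UtcNow,
    //       "<free-text KQL-M query goes here>");
    //   string payload = JsonConvert.SerializeObject(request);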
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="SpecialCharsHelper.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Utility
{
using System;
/// <summary>
/// Helper class to escape and un-escape special chars
/// </summary>
public class SpecialCharsHelper
{
/// <summary>
        /// Escapes the input string twice.
/// </summary>
/// <param name="input">The input string.</param>
/// <returns>The escaped string</returns>
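        /// <example>
        /// Illustrative only: <c>EscapeTwice("a/b c")</c> returns <c>"a%252Fb%2520c"</c>; the first pass
        /// yields <c>"a%2Fb%20c"</c> and the second pass escapes the '%' characters introduced by the first.
        /// </example>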
public static string EscapeTwice(string input)
{
return Uri.EscapeDataString(Uri.EscapeDataString(input));
}
}
}<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="Role.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Configuration
{
/// <summary>
/// Determines what level of access an entity has to an MDM entity.
/// </summary>
public enum Role
{
/// <summary>
/// For certificates only - can read metric data but cannot make any changes.
/// </summary>
ReadOnly,
/// <summary>
/// This role has the ability to modify and create dashboards within an account.
/// </summary>
DashboardEditor,
/// <summary>
/// This role has the ability to modify metric, monitor or health configuration within an account.
/// </summary>
ConfigurationEditor,
/// <summary>
/// For certificates only - can publish metrics but cannot make other changes.
/// </summary>
MetricPublisher,
/// <summary>
/// Full access to modify configuration, account settings and dashboards.
/// </summary>
Administrator,
/// <summary>
/// This role has the ability to modify and create monitors within an account.
/// </summary>
MonitorEditor,
/// <summary>
/// This role has the ability to modify and create monitors/metrics within an account.
/// </summary>
MetricAndMonitorEditor
}
}
<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="IDistinctCountConfiguration.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Configuration
{
using System.Collections.Generic;
/// <summary>
    /// Configures the distinct count feature on this preaggregate.
/// </summary>
public interface IDistinctCountConfiguration
{
/// <summary>
/// Gets the dimensions.
/// </summary>
IEnumerable<string> Dimensions { get; }
/// <summary>
/// Adds the dimension.
/// </summary>
/// <param name="dimension">The dimension.</param>
void AddDimension(string dimension);
/// <summary>
/// Removes the dimension.
/// </summary>
/// <param name="dimension">The dimension.</param>
void RemoveDimension(string dimension);
}
}<file_sep># About Metrics.MultiDimensionalMetricsClient and Metrics.Serialization
The two projects are copied from [EngSys-MDA-MetricsAndHealth](https://msazure.visualstudio.com/DefaultCollection/One/_git//EngSys-MDA-MetricsAndHealth).
We need the support from the package `Microsoft.Cloud.Metrics.Client`; however, it was built against .NET Framework and is not compatible with .NET Core.
So we copied the relevant source from the Metrics repo and fixed the code so that it works on .NET Core.
The following is the list of modifications:
* NuGet dependencies for both projects
* Metrics.MultiDimensionalMetricsClient
* Utility/UserAccessTokenRefresher.cs
* Line 81, `new PlatformParameters()` - removed constructor arguments
* Utility/HttpClientHelper.cs
* `WebRequestHandler` to `HttpClientHandler`
* `WebRequestHandler.ServerCertificateValidationCallback` to `HttpClientHandler.ServerCertificateCustomValidationCallback`<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="CompositeMetricConfiguration.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Configuration
{
using System;
using System.Collections.Generic;
using System.Linq;
using Newtonsoft.Json;
using Online.Metrics.Serialization;
/// <summary>
/// Represents a composite metric in the MDM System.
/// </summary>
public sealed class CompositeMetricConfiguration : ICompositeMetricConfiguration
{
private readonly List<CompositeMetricSource> metricSources;
private readonly List<CompositeExpression> compositeExpressions;
/// <summary>
/// Initializes a new instance of the <see cref="CompositeMetricConfiguration"/> class.
/// </summary>
/// <param name="metricNamespace">The metric namespace.</param>
/// <param name="name">The name.</param>
/// <param name="lastUpdatedTime">The last updated time.</param>
/// <param name="lastUpdatedBy">The last updated by.</param>
/// <param name="version">The version.</param>
/// <param name="treatMissingSeriesAsZeroes">if set to <c>true</c> treat missing series as zeroes.</param>
/// <param name="description">The description of the metric.</param>
/// <param name="metricSources">The metric sources.</param>
/// <param name="compositeExpressions">The composite expressions.</param>
[JsonConstructor]
internal CompositeMetricConfiguration(
string metricNamespace,
string name,
DateTime lastUpdatedTime,
string lastUpdatedBy,
uint version,
bool treatMissingSeriesAsZeroes,
string description,
IEnumerable<CompositeMetricSource> metricSources,
IEnumerable<CompositeExpression> compositeExpressions)
{
this.MetricNamespace = metricNamespace;
this.Name = name;
this.LastUpdatedTime = lastUpdatedTime;
this.LastUpdatedBy = lastUpdatedBy;
this.Version = version;
this.TreatMissingSeriesAsZeroes = treatMissingSeriesAsZeroes;
this.Description = description;
this.metricSources = metricSources.ToList();
this.compositeExpressions = compositeExpressions.ToList();
}
/// <summary>
/// The namespace of the metric.
/// </summary>
public string MetricNamespace { get; }
/// <summary>
/// The name of the metric.
/// </summary>
public string Name { get; }
/// <summary>
/// The last updated time of the metric.
/// </summary>
public DateTime LastUpdatedTime { get; }
/// <summary>
/// The last entity to update the metric.
/// </summary>
public string LastUpdatedBy { get; }
/// <summary>
/// The version of the metric.
/// </summary>
public uint Version { get; }
/// <summary>
/// Gets the description of the metric.
/// </summary>
public string Description { get; }
/// <summary>
/// Gets the metric sources.
/// </summary>
public IEnumerable<CompositeMetricSource> MetricSources
{
get { return this.metricSources; }
}
/// <summary>
/// Gets the composite expressions.
/// </summary>
public IEnumerable<CompositeExpression> CompositeExpressions
{
get { return this.compositeExpressions; }
}
/// <summary>
/// Gets or sets a value indicating whether to treat missing series as zeroes.
/// </summary>
public bool TreatMissingSeriesAsZeroes { get; set; }
/// <summary>
/// Creates the composite metric configuration.
/// </summary>
/// <param name="metricNamespace">The metric namespace.</param>
/// <param name="metric">The metric.</param>
/// <param name="metricSources">The metric sources.</param>
/// <param name="expressions">The expressions.</param>
/// <param name="treatMissingSeriesAsZeroes">if set to <c>true</c> [treat missing series as zeroes].</param>
/// <param name="description">The optional description of the metric.</param>
/// <returns>The composite metric configuration.</returns>
public static CompositeMetricConfiguration CreateCompositeMetricConfiguration(
string metricNamespace,
string metric,
IEnumerable<CompositeMetricSource> metricSources,
IEnumerable<CompositeExpression> expressions,
bool treatMissingSeriesAsZeroes,
string description = "")
{
if (string.IsNullOrWhiteSpace(metricNamespace))
{
throw new ArgumentNullException(nameof(metricNamespace));
}
if (string.IsNullOrWhiteSpace(metric))
{
throw new ArgumentNullException(nameof(metric));
}
if (metricSources == null)
{
throw new ArgumentNullException(nameof(metricSources));
}
if (expressions == null)
{
throw new ArgumentNullException(nameof(expressions));
}
if (description.Length > SerializationConstants.MaximumMetricDescriptionLength)
{
throw new ArgumentOutOfRangeException(
nameof(description),
$"The metric description cannot be greater than {SerializationConstants.MaximumMetricDescriptionLength} characters.");
}
return new CompositeMetricConfiguration(metricNamespace, metric, DateTime.MinValue, string.Empty, 0, treatMissingSeriesAsZeroes, description, metricSources, expressions);
}
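        // Illustrative sketch, not part of the original source. It assumes "sources" and "expressions"
        // are already-built IEnumerable<CompositeMetricSource> / IEnumerable<CompositeExpression>
        // instances; the namespace, metric name and description are hypothetical placeholders.
        //   var composite = CompositeMetricConfiguration.CreateCompositeMetricConfiguration(
        //       "MyNamespace",
        //       "MyCompositeMetric",
        //       sources,
        //       expressions,
        //       treatMissingSeriesAsZeroes: true,
        //       description: "Failure ratio derived from two source metrics");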
/// <summary>
/// Adds the metric source.
/// </summary>
/// <param name="metricSource">The metric source.</param>
public void AddMetricSource(CompositeMetricSource metricSource)
{
if (metricSource == null)
{
throw new ArgumentNullException(nameof(metricSource));
}
if (this.metricSources.Any(x => string.Equals(x.DisplayName, metricSource.DisplayName, StringComparison.OrdinalIgnoreCase)))
{
throw new ConfigurationValidationException("Cannot add metric sources with duplicate names.", ValidationType.DuplicateMetricSource);
}
this.metricSources.Add(metricSource);
}
/// <summary>
/// Removes the metric source.
/// </summary>
/// <param name="metricSourceName">The metric source name.</param>
public void RemoveMetricSource(string metricSourceName)
{
this.metricSources.RemoveAll(x => string.Equals(x.DisplayName, metricSourceName, StringComparison.OrdinalIgnoreCase));
}
/// <summary>
/// Adds the expression.
/// </summary>
/// <param name="expression">The expression.</param>
public void AddExpression(CompositeExpression expression)
{
if (expression == null)
{
throw new ArgumentNullException(nameof(expression));
}
if (this.compositeExpressions.Any(x => string.Equals(x.Name, expression.Name, StringComparison.OrdinalIgnoreCase)))
{
throw new ConfigurationValidationException("Cannot add composite expressions with duplicate names.", ValidationType.DuplicateSamplingType);
}
this.compositeExpressions.Add(expression);
}
/// <summary>
/// Removes the expression.
/// </summary>
/// <param name="expressionName">The expression name.</param>
public void RemoveExpression(string expressionName)
{
this.compositeExpressions.RemoveAll(x => string.Equals(x.Name, expressionName, StringComparison.OrdinalIgnoreCase));
}
}
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="EtwSessionManager.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// <summary>
// Types that encapsulate an ETW session.
// </summary>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Metrics.Etw
{
using System;
using System.ComponentModel;
using System.Diagnostics.CodeAnalysis;
using System.Runtime.InteropServices;
/// <summary>
/// Helper class that manages ETW sessions.
/// </summary>
internal static unsafe class EtwSessionManager
{
/// <summary>
/// Maximum file name size.
/// </summary>
private const uint MaxNameSize = 1024;
/// <summary>
/// Size of the extension area where session and file name can be added to the end of NativeMethods.EventTraceProperties.
/// </summary>
/// <remarks>
/// It accounts for 2 strings (which can be up to MaxNameSize in length).
/// </remarks>
private const uint ExtSize = 2 * MaxNameSize * sizeof(char);
/// <summary>
/// Maximum number of ETW sessions that can be handled by the QueryAllTraces API.
/// </summary>
private const uint MaxSessionsByQueryAllTraces = 64;
/// <summary>
/// Keeps the size of the managed version of the EventTraceProperties struct.
/// </summary>
private static readonly uint TracePropertiesSize;
/// <summary>
/// A shared buffer to be used by static methods to avoid allocation and de-allocation on every call.
/// </summary>
[SuppressMessage("Microsoft.Reliability", "CA2006:UseSafeHandleToEncapsulateNativeResources", Justification = "Not accessible to any 3rd-party MS or not")]
private static IntPtr sharedBuffer;
/// <summary>
        /// An object to serve as a lock for sharing the sharedBuffer.
/// </summary>
private static object sharedBufferLock = new object();
/// <summary>
/// Keeps track of the size of the shared buffer.
/// </summary>
private static int sharedBufferSize;
/// <summary>
/// Keeps track of the number of expected active ETW sessions when querying all sessions.
/// </summary>
private static uint expectedActiveEtwSessions = 32;
/// <summary>
/// Initializes static members of the <see cref="EtwSessionManager"/> class.
/// </summary>
static EtwSessionManager()
{
TracePropertiesSize = (uint)Marshal.SizeOf(typeof(NativeMethods.EventTraceProperties));
sharedBufferSize = (int)(TracePropertiesSize + ExtSize);
sharedBuffer = Marshal.AllocHGlobal(sharedBufferSize);
}
/// <summary>
        /// Attempts to stop the specified session if it exists on the system.
/// </summary>
/// <param name="sessionName">
/// Name of the session to be stopped.
/// </param>
/// <returns>
        /// True if the given session was stopped, false otherwise.
/// </returns>
public static bool Stop(string sessionName)
{
const int EtwSessionNotFound = 4201;
int result;
ControlTrace(sessionName, NativeMethods.TraceControl.Stop, out result);
return result == NativeMethods.ErrorSuccess || result == EtwSessionNotFound;
}
/// <summary>
/// Attempts to retrieve the properties (including status) of the given ETW session.
/// </summary>
/// <param name="sessionName">
/// The name for the session which the caller wants to retrieve the properties.
/// </param>
/// <param name="traceProperties">
/// The <see cref="NativeMethods.EventTraceProperties"/> instance describing the session passed
        /// by the caller. It contains valid data only if the method returns true.
/// </param>
/// <returns>
/// True if the function retrieved the session properties, false otherwise.
/// </returns>
public static bool TryGetSessionProperties(string sessionName, out NativeMethods.EventTraceProperties traceProperties)
{
int result;
traceProperties = ControlTrace(sessionName, NativeMethods.TraceControl.Query, out result);
return result == NativeMethods.ErrorSuccess;
}
/// <summary>
/// Attempts to retrieve the active ETW sessions on the box.
/// </summary>
/// <returns>
/// An array with the name of the active ETW sessions on the box.
/// </returns>
public static string[] GetSessionNames()
{
lock (sharedBufferLock)
{
uint actualSessions = 0;
var retryInCaseOfSmallBuffer = true;
int result = (int)NativeMethods.ErrorSuccess;
while (retryInCaseOfSmallBuffer)
{
var requiredSize = expectedActiveEtwSessions * (TracePropertiesSize + ExtSize);
if (sharedBufferSize < requiredSize)
{
ReallocateSharedBufferSize((int)requiredSize);
}
NativeMethods.ZeroMemory(sharedBuffer, requiredSize);
var pointerArray = new IntPtr[expectedActiveEtwSessions];
for (int i = 0; i < expectedActiveEtwSessions; ++i)
{
var traceProperties = (NativeMethods.EventTraceProperties*)(
(char*)sharedBuffer + (i * (TracePropertiesSize + ExtSize)));
traceProperties->LoggerNameOffset = TracePropertiesSize;
traceProperties->LogFileNameOffset = TracePropertiesSize + (MaxNameSize * sizeof(char));
traceProperties->Wnode.BufferSize = TracePropertiesSize + ExtSize;
pointerArray[i] = (IntPtr)traceProperties;
}
fixed (void* pointersToPropertyArray = pointerArray)
{
result = NativeMethods.QueryAllTracesW(
pointersToPropertyArray, expectedActiveEtwSessions, ref actualSessions);
// There is a bug in QueryAllTracesW: it returns success even when there wasn't space to capture
// all active sessions and actualSessions ends up with the same value passed as expectedActiveEtwSessions.
// In general QueryAllTracesW does not return ERROR_MORE_DATA when there
// are more sessions than EVENT_TRACE_PROPERTIES in the array, so if the returned number of
// actual sessions is equal to the number of expected sessions it is necessary to try again
// until an array with more EVENT_TRACE_PROPERTIES pointers than actual sessions is passed.
if ((result == NativeMethods.ErrorMoreData || expectedActiveEtwSessions == actualSessions) &&
expectedActiveEtwSessions < MaxSessionsByQueryAllTraces)
{
expectedActiveEtwSessions = (expectedActiveEtwSessions < actualSessions)
? actualSessions + 1
: 2 * expectedActiveEtwSessions;
expectedActiveEtwSessions = Math.Max(expectedActiveEtwSessions, MaxSessionsByQueryAllTraces);
}
else
{
retryInCaseOfSmallBuffer = false;
}
}
}
if (result != NativeMethods.ErrorSuccess)
{
throw new Win32Exception(result, "Error calling QueryAllTracesW (0x" + result.ToString("X8") + ")");
}
// Capture the session names from the EVENT_TRACE_PROPERTIES structures.
var sessionNames = new string[actualSessions];
for (int i = 0; i < actualSessions; ++i)
{
var traceProperties = (char*)sharedBuffer + (int)(i * (TracePropertiesSize + ExtSize));
sessionNames[i] = new string(
traceProperties + (int)((NativeMethods.EventTraceProperties*)traceProperties)->LoggerNameOffset);
}
return sessionNames;
}
}
/// <summary>
/// Helper method that allows to get information about the given provider on the specified ETW session.
/// </summary>
/// <param name="loggerId">
/// The id of the ETW session for which the provider settings are going to be retrieved. This id can
        /// be retrieved from the properties returned by the TryGetSessionProperties method, via the <c>Wnode.HistoricalContext</c>
/// field.
/// </param>
/// <param name="providerId">
/// The id of the provider for which the session settings should be retrieved.
/// </param>
/// <param name="enableInfo">
/// The struct containing information with the provider in the specified session.
        /// The struct contains valid data only if the method returns true.
/// </param>
/// <returns>
/// True if the provider was enabled on the session, false otherwise.
/// </returns>
public static bool GetProviderInfo(ulong loggerId, Guid providerId, out NativeMethods.TraceEnableInfo enableInfo)
{
var foundProvider = false;
enableInfo = default(NativeMethods.TraceEnableInfo);
lock (sharedBufferLock)
{
int requiredBufferSize = 0;
void* bufferPtr = null;
var retryInCaseOfSmallBuffer = true;
int result = (int)NativeMethods.ErrorSuccess;
while (retryInCaseOfSmallBuffer)
{
bufferPtr = sharedBuffer.ToPointer();
result = NativeMethods.EnumerateTraceGuidsEx(
NativeMethods.TraceQueryInfoClass.TraceGuidQueryInfo,
&providerId,
sizeof(Guid),
bufferPtr,
sharedBufferSize,
ref requiredBufferSize);
if (result != NativeMethods.ErrorInsufficientBuffer)
{
retryInCaseOfSmallBuffer = false;
}
else
{
ReallocateSharedBufferSize(requiredBufferSize);
}
}
if (result == NativeMethods.ErrorWmiGuidNotFound)
{
// Provider not found
return false;
}
if (result != NativeMethods.ErrorSuccess)
{
throw new Win32Exception(result, "Error calling EnumerateTraceGuidsEx (0x" + result.ToString("X8") + ")");
}
var traceGuidInfo = *((NativeMethods.TraceGuidInfo*)bufferPtr);
bufferPtr = ((byte*)bufferPtr) + sizeof(NativeMethods.TraceGuidInfo);
for (int i = 0; i < traceGuidInfo.InstanceCount && !foundProvider; ++i)
{
var traceProviderInstanceInfo = *((NativeMethods.TraceProviderInstanceInfo*)bufferPtr);
if (traceProviderInstanceInfo.EnableCount > 0)
{
var traceEnableInfoPtr = (NativeMethods.TraceEnableInfo*)(
(byte*)bufferPtr + sizeof(NativeMethods.TraceProviderInstanceInfo));
for (int j = 0; j < traceProviderInstanceInfo.EnableCount; ++j)
{
// Only add information for the expected session
if (traceEnableInfoPtr->LoggerId == loggerId)
{
enableInfo = *traceEnableInfoPtr;
foundProvider = true;
break;
}
traceEnableInfoPtr++;
}
}
bufferPtr = (byte*)bufferPtr + traceProviderInstanceInfo.NextOffset;
}
}
return foundProvider;
}
/// <summary>
/// Attempts to get the current ETL file name of the given ETW session.
/// </summary>
/// <param name="sessionName">
/// Name of the session to retrieve the current file.
/// </param>
/// <param name="currentSessionFile">
/// Name of the ETL file currently being used by the session.
/// </param>
/// <returns>
/// True if the operation to retrieve the current ETL file of the session was successful, false otherwise.
/// </returns>
[SuppressMessage("Microsoft.Security", "CA2122:DoNotIndirectlyExposeMethodsWithLinkDemands", Justification = "Not accessible to any 3rd-party MS or not")]
public static bool TryGetCurrentFileOfSession(string sessionName, out string currentSessionFile)
{
currentSessionFile = null;
lock (sharedBufferLock)
{
var traceProperties = new NativeMethods.EventTraceProperties
{
LoggerNameOffset = TracePropertiesSize,
LogFileNameOffset = TracePropertiesSize + (MaxNameSize * sizeof(char)),
Wnode = new NativeMethods.WnodeHeader
{
BufferSize = TracePropertiesSize + ExtSize,
}
};
NativeMethods.ZeroMemory(sharedBuffer, traceProperties.Wnode.BufferSize);
Marshal.StructureToPtr(traceProperties, sharedBuffer, true);
var errorCode = NativeMethods.ControlTrace(
0,
sessionName,
sharedBuffer,
(int)NativeMethods.TraceControl.Query);
var tracePropertiesPtr = (NativeMethods.EventTraceProperties*)sharedBuffer;
if (errorCode == NativeMethods.ErrorSuccess && tracePropertiesPtr->LogFileNameOffset > 0)
{
currentSessionFile = new string((char*)(tracePropertiesPtr + (int)tracePropertiesPtr->LogFileNameOffset));
}
return errorCode == NativeMethods.ErrorSuccess && currentSessionFile != null;
}
}
/// <summary>
/// Sends the specified control to the given session, captures the result of the
        /// call to ControlTrace and returns the struct with the trace properties (its contents
/// depend on the control passed to the function).
/// </summary>
/// <param name="sessionName">
/// Name of the session to have the control applied against.
/// </param>
/// <param name="traceControl">
/// Control to be applied against the session.
/// </param>
/// <param name="errorCode">
/// Output parameter that receives the result of the call to ControlTrace.
/// </param>
/// <returns>
/// The struct with the trace properties (its contents depend on the control passed to the function).
/// </returns>
[SuppressMessage("Microsoft.Security", "CA2122:DoNotIndirectlyExposeMethodsWithLinkDemands", Justification = "Not accessible to any 3rd-party MS or not")]
private static NativeMethods.EventTraceProperties ControlTrace(
string sessionName, NativeMethods.TraceControl traceControl, out int errorCode)
{
lock (sharedBufferLock)
{
var traceProperties = new NativeMethods.EventTraceProperties
{
LoggerNameOffset = TracePropertiesSize,
LogFileNameOffset = TracePropertiesSize + (MaxNameSize * sizeof(char)),
Wnode = new NativeMethods.WnodeHeader
{
BufferSize = TracePropertiesSize + ExtSize,
}
};
NativeMethods.ZeroMemory(sharedBuffer, traceProperties.Wnode.BufferSize);
Marshal.StructureToPtr(traceProperties, sharedBuffer, true);
errorCode = NativeMethods.ControlTrace(
0,
sessionName,
sharedBuffer,
(uint)traceControl);
traceProperties = (NativeMethods.EventTraceProperties)Marshal.PtrToStructure(
sharedBuffer, typeof(NativeMethods.EventTraceProperties));
return traceProperties;
}
}
/// <summary>
        /// Helper that reallocates the shared buffer to accommodate a new desired size.
/// </summary>
/// <param name="newBufferSize">
/// New desired buffer size.
/// </param>
[SuppressMessage("Microsoft.Security", "CA2122:DoNotIndirectlyExposeMethodsWithLinkDemands", Justification = "Not accessible to any 3rd-party MS or not")]
private static void ReallocateSharedBufferSize(int newBufferSize)
{
lock (sharedBufferLock)
{
var newSize = Math.Max(sharedBufferSize, newBufferSize);
Marshal.FreeHGlobal(sharedBuffer);
sharedBufferSize = newSize;
sharedBuffer = Marshal.AllocHGlobal(sharedBufferSize);
}
}
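        // Illustrative sketch, not part of the original source; "MyEtwSession" is a hypothetical session name.
        //   NativeMethods.EventTraceProperties props;
        //   if (EtwSessionManager.TryGetSessionProperties("MyEtwSession", out props))
        //   {
        //       var loggerId = props.Wnode.HistoricalContext;   // session id usable with GetProviderInfo
        //   }
        //   bool stopped = EtwSessionManager.Stop("MyEtwSession");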
}
}
<file_sep>//---------------------------------------------------------------------------------
// <copyright file="ActiveCollector.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//---------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Metrics.Etw
{
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Security;
using System.Text;
using Logging;
/// <summary>
/// An instance of this type represents an active collector.
/// </summary>
/// <remarks>
/// The type is dissociated from its configuration since this can be updated or changed manually
/// outside of the control of the instance (e.g.: via <c>logman</c> by the user). So this type does not
/// carry the configuration information instead it directly queries the system for the respective
/// ETW session whenever it needs to retrieve the current settings.
/// </remarks>
/// <remarks>
    /// It was initially implemented using only logman, but retrieving the providers
    /// of an ETW session that way would require text parsing. To avoid that, this class also uses the
/// PLA API.
/// </remarks>
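    /// <example>
    /// Illustrative sketch only; the session name, ETL root and <c>config</c> variable below are hypothetical:
    /// <code>
    /// var collector = new ActiveCollector("MetricsEtwSession", @"D:\etl");
    /// var backlog = collector.StartCollector(config);      // config is a CollectorConfiguration instance
    /// ActiveCollector.TryUpdateProviders(config);           // enable the providers declared in the config
    /// string closedFile;
    /// if (collector.RotateSessionFile(DateTime.UtcNow, out closedFile))
    /// {
    ///     // a file was rotated; closedFile can now be handed to an ETL dispatcher
    /// }
    /// </code>
    /// </example>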
internal sealed class ActiveCollector
{
/// <summary>
/// Exit code value that indicates success on the call to a program.
/// </summary>
private const int ExitCodeSuccess = 0;
/// <summary>
/// Custom log id to be used in the log statements.
/// </summary>
private static readonly object LogId = Logger.CreateCustomLogId("ActiveCollector");
/// <summary>
/// Root directory where the ETL files created and manipulated via this instance are going to be created.
/// </summary>
private readonly string baseEtlLocation;
/// <summary>
/// Flag to indicate if this is a file collector or not.
/// </summary>
private bool isFileCollector;
/// <summary>
/// For collectors writing to files this keeps the base name for the ETL files.
/// It should be used only for file collectors.
/// </summary>
private string etlBaseName;
/// <summary>
/// Keeps the maximum number of files that should be kept for collectors writing
/// ETL files. It should be used only for file collectors.
/// </summary>
private int maxFileCount;
/// <summary>
/// Keeps the maximum size that is desired for each ETL file.
/// It should be used only for file collectors.
/// </summary>
private int maxFileSizeKB;
/// <summary>
/// Keeps the desired maximum time span of each ETL file.
/// It should be used only for file collectors.
/// </summary>
private TimeSpan maxFileTimeSpan;
/// <summary>
        /// Keeps the time at which the last actual file rotation was performed.
/// </summary>
private DateTime lastRotationTime;
/// <summary>
/// Keeps track of the current ETL file of the collector, it will be null if it is a real-time collector.
/// </summary>
private string currentEtlSessionFile;
/// <summary>
/// Initializes a new instance of the <see cref="ActiveCollector"/> class.
/// </summary>
/// <param name="sessionName">
/// The name of the ETW session to be used by the collector.
/// </param>
/// <param name="baseEtlLocation">
/// If the collector creates ETL files this is the base location where they should be created.
/// </param>
public ActiveCollector(string sessionName, string baseEtlLocation = ".")
{
if (string.IsNullOrEmpty(sessionName))
{
throw new ArgumentException("sessionName cannot be null or empty.", "sessionName");
}
if (string.IsNullOrEmpty(baseEtlLocation))
{
throw new ArgumentException("baseEtlLocation cannot be null or empty.", "baseEtlLocation");
}
this.Name = sessionName;
this.lastRotationTime = DateTime.MaxValue;
this.baseEtlLocation = baseEtlLocation;
}
/// <summary>
/// Gets the name of the session associated to the collector.
/// </summary>
public string Name { get; private set; }
/// <summary>
        /// Gets the directory where ETL files for this collector should be written.
/// It should be used only for file collectors.
/// </summary>
public string EtlLogsDirectory { get; private set; }
/// <summary>
/// Stops collector with the given sessionName.
/// </summary>
/// <param name="collectorName">
/// Name of the collector to be stopped.
/// </param>
/// <returns>
/// True if the operation succeeded, false otherwise.
/// </returns>
public static bool StopCollector(string collectorName)
{
const string MethodName = "StopCollector";
if (string.IsNullOrEmpty(collectorName))
{
throw new ArgumentException("collectorName cannot be null or empty.", "collectorName");
}
Logger.Log(
LoggerLevel.Info,
LogId,
MethodName,
"Attempting to stop ETW session [{0}]",
collectorName);
if (!EtwSessionManager.Stop(collectorName))
{
Logger.Log(
LoggerLevel.Error,
LogId,
MethodName,
"Failed to stop ETW session [{0}]",
collectorName);
return false;
}
Logger.Log(
LoggerLevel.Info,
LogId,
MethodName,
"ETW session [{0}] was stopped",
collectorName);
return true;
}
/// <summary>
/// Update the collector session with the provider settings specified on the configuration.
/// </summary>
/// <param name="config">
/// The configuration of the collector.
/// </param>
/// <returns>True if update providers succeeded.</returns>
public static bool TryUpdateProviders(CollectorConfiguration config)
{
bool result = true;
foreach (var provider in config.Providers.Values)
{
var args = string.Format(
CultureInfo.InvariantCulture,
"update \"{0}\" -p {1} 0x{2},0x{3} {4} -ets",
config.Name,
provider.Id.ToString("B"),
provider.KeywordsAny.ToString("X16"),
provider.KeywordsAll.ToString("X16"),
((int)provider.Level).ToString(CultureInfo.InvariantCulture));
var exitCode = RunCommand("logman", args);
if (exitCode != ExitCodeSuccess)
{
Logger.Log(
LoggerLevel.Error,
LogId,
"UpdateProviders",
"Failed to set provider {0} on collector [{1}]. Error code: 0x{2}",
provider.Id.ToString("B"),
config.Name,
exitCode.ToString("X8"));
result = false;
}
}
return result;
}
/// <summary>
/// Starts the collector with the given configuration.
/// </summary>
/// <param name="config">
/// The configuration of the collector to be started.
/// </param>
/// <returns>
        /// The complete list with the backlog of ETL files associated with the collector, from oldest to newest.
/// </returns>
public List<string> StartCollector(CollectorConfiguration config)
{
const string MethodName = "StartCollector";
var etlBacklog = new List<string>();
// If a deprecated session exists try to shut it down
NativeMethods.EventTraceProperties traceProperties;
if (!string.IsNullOrEmpty(config.DeprecatedCollector))
{
StopCollector(config.DeprecatedCollector);
}
var clockType = config.ClockType == ClockType.Default ? ClockType.Perf : config.ClockType;
var preExistingSession = false;
if (EtwSessionManager.TryGetSessionProperties(this.Name, out traceProperties))
{
if ((NativeMethods.EtwSessionClockType)clockType == traceProperties.Wnode.ClientContext)
{
// Old session can be re-used.
preExistingSession = true;
}
else
{
// Old session needs to be stopped. Note that if stop failed there is a follow up check to ensure that
// the error was not due to the session being stopped between the checks.
if (!StopCollector(this.Name))
{
Logger.Log(
LoggerLevel.Error,
LogId,
MethodName,
"Failed to stop existing trace session [{0}]. Cannot proceed to correctly update session.",
this.Name);
return etlBacklog;
}
}
}
StringBuilder sb = new StringBuilder(256);
sb.AppendFormat(
CultureInfo.InvariantCulture,
"{0} \"{1}\" -nb {2} {3} -bs {4} -ft {5} -ct {6} -ets ",
preExistingSession ? "update" : "start",
config.Name,
config.MinBufferCount.ToString(CultureInfo.InvariantCulture),
config.MaxBufferCount.ToString(CultureInfo.InvariantCulture),
config.BufferSizeKB.ToString(CultureInfo.InvariantCulture),
config.FlushTimerSec.ToString(CultureInfo.InvariantCulture),
clockType);
if (config.SessionType == SessionType.Realtime)
{
sb.Append("-rt");
}
else
{
// Add parameters for proper ETL file rotation
this.isFileCollector = true;
this.maxFileCount = config.MaxFileCount;
this.maxFileSizeKB = config.MaxFileSizeMB * 1024;
this.maxFileTimeSpan = config.MaxFileTimeSpan;
this.etlBaseName = config.OriginalName;
this.EtlLogsDirectory = Path.Combine(this.baseEtlLocation, config.OriginalName);
if (!ProtectedIO(
() => Directory.CreateDirectory(this.EtlLogsDirectory),
e =>
{
Logger.Log(
LoggerLevel.Error,
LogId,
MethodName,
"Failed to create directory [{0}] for collector [{1}]. Exception: {2}",
this.EtlLogsDirectory,
config.Name,
e);
Logger.Log(
LoggerLevel.Error,
LogId,
MethodName,
"Failed to create or update ETW session [{0}]",
config.Name);
}))
{
return etlBacklog;
}
// Get the full list of available ETL files, it will be trimmed according to success of update command.
etlBacklog = this.GetExistingEtlFiles();
this.currentEtlSessionFile = this.GenerateNextSessionFileName();
sb.AppendFormat(CultureInfo.InvariantCulture, "-mode Sequential -o \"{0}\"", this.currentEtlSessionFile);
if (config.SessionType == SessionType.FileAndRealtime
|| config.SessionType == SessionType.RealtimeAndFile)
{
sb.Append(" -rt");
}
}
var exitCode = RunCommand("logman", sb.ToString());
this.lastRotationTime = DateTime.UtcNow;
if (exitCode != ExitCodeSuccess)
{
Logger.Log(
LoggerLevel.Error,
LogId,
MethodName,
"Logman failed to create or update ETW session [{0}].",
config.Name);
if (preExistingSession)
{
if (config.SessionType != SessionType.Realtime)
{
if (!EtwSessionManager.TryGetCurrentFileOfSession(config.Name, out this.currentEtlSessionFile))
{
Logger.Log(
LoggerLevel.Error,
LogId,
MethodName,
"Failed to retrieve name of the ETL being used by ETW session [{0}].",
config.Name);
}
if (etlBacklog.Count > 0
&& string.Compare(
etlBacklog.Last(), this.currentEtlSessionFile, StringComparison.OrdinalIgnoreCase) == 0)
{
etlBacklog.RemoveAt(etlBacklog.Count - 1);
Logger.Log(
LoggerLevel.Warning,
LogId,
MethodName,
"Current ETL file removed from backlog list since it is still in use. ETL File [{0}]",
this.currentEtlSessionFile);
}
}
Logger.Log(
LoggerLevel.Info,
LogId,
MethodName,
"Attempting to mitigate with pre-existing session...");
if (!ExistingSessionSatisfiesProviders(config))
{
Logger.Log(
LoggerLevel.Error,
LogId,
"StartCollector",
"Pre-existing session cannot be used since it does not satisfy the config.");
}
else
{
Logger.Log(
LoggerLevel.Error,
LogId,
MethodName,
"Using pre-existing ETW session since it satisfied the configuration.");
exitCode = ExitCodeSuccess;
}
}
}
if (exitCode == ExitCodeSuccess || preExistingSession)
{
Logger.Log(
LoggerLevel.Info,
LogId,
MethodName,
"ETW session is in place call UpdateProviders to enable them.");
}
etlBacklog.Sort(StringComparer.OrdinalIgnoreCase);
return etlBacklog;
}
/// <summary>
/// Rotates the session ETL file according to the current client configuration, taking care of
/// deleting old files if necessary. Returns true if a file was actually rotated.
/// </summary>
/// <param name="referenceTime">
/// The time for which the method should calculate if a file needs to be rotated or not.
/// </param>
/// <param name="closedSessionFile">
/// Output parameter that will receive the name of the ETL file that was just closed in case
/// an actual file rotation was performed. It is meaningful only if the method returns true.
/// </param>
/// <returns>
/// True if an actual file rotation was performed, otherwise it returns false.
/// </returns>
public bool RotateSessionFile(DateTime referenceTime, out string closedSessionFile)
{
var rotatedFile = false;
closedSessionFile = null;
if (this.isFileCollector)
{
// The rotation can be triggered in two conditions:
// 1. If the time for the current file passed
// 2. If the current size of the file exceeds the threshold
var attemptRotation =
((referenceTime - this.lastRotationTime) > this.maxFileTimeSpan) ||
(this.GetCurrentFileSize() > this.maxFileSizeKB);
if (attemptRotation)
{
var nextFileName = this.GenerateNextSessionFileName();
var args = string.Format(CultureInfo.InvariantCulture, "update \"{0}\" -o \"{1}\" -ets", this.Name, nextFileName);
var exitCode = RunCommand("logman", args);
if (exitCode == ExitCodeSuccess)
{
rotatedFile = true;
closedSessionFile = this.currentEtlSessionFile;
this.currentEtlSessionFile = nextFileName;
this.lastRotationTime = DateTime.UtcNow;
this.DeleteOlderSessionFiles();
}
}
}
return rotatedFile;
}
/// <summary>
/// Checks if an existing ETW session satisfies the given collector configuration.
/// </summary>
/// <param name="config">
/// Configuration that should be checked against the existing ETW session.
/// </param>
/// <returns>
/// True if the existing session satisfies the given configuration, false otherwise.
/// </returns>
internal static bool ExistingSessionSatisfiesProviders(CollectorConfiguration config)
{
// Ok, the session is already in place, just check the providers
bool providersOk = false;
NativeMethods.EventTraceProperties sessionProperties;
if (EtwSessionManager.TryGetSessionProperties(config.Name, out sessionProperties))
{
providersOk = true;
// Loop over providers and ensure that they are good
var loggerId = sessionProperties.Wnode.HistoricalContext;
foreach (var provider in config.Providers)
{
NativeMethods.TraceEnableInfo providerInSession;
providersOk = EtwSessionManager.GetProviderInfo(loggerId, provider.Value.Id, out providerInSession)
&& providerInSession.IsEnabled != 0
&& provider.Value.Level <= (EtwTraceLevel)providerInSession.Level
&& (provider.Value.KeywordsAll | providerInSession.MatchAllKeyword) == provider.Value.KeywordsAll
&& (provider.Value.KeywordsAny & providerInSession.MatchAnyKeyword) == provider.Value.KeywordsAny;
if (!providersOk)
{
// pass the native type to a provider to get a nice string representing the
// current settings
var providerConfigInSession = new ProviderConfiguration(
provider.Value.Id,
(EtwTraceLevel)providerInSession.Level,
providerInSession.MatchAnyKeyword,
providerInSession.MatchAllKeyword);
Logger.Log(
LoggerLevel.Error,
LogId,
"ExistingSessionSatisfiesProviders",
"Provider configuration [{0}] is not satisfied in ETW session [{1}]. Actual provider settings in session: {2}",
provider.Value,
config.Name,
providerInSession.IsEnabled == 0 ? "provider not enabled" : providerConfigInSession.ToString());
}
}
}
return providersOk;
}
/// <summary>
/// Disables a specific provider in a given collector.
/// </summary>
/// <param name="collectorName">
/// The sessionName of the collector in which the provider should be disabled.
/// </param>
/// <param name="providerId">
/// The Id of provider that should be disabled.
/// </param>
private static void DisableProvider(string collectorName, Guid providerId)
{
var args = string.Format(
CultureInfo.InvariantCulture,
"update \"{0}\" --p {1} -ets",
collectorName,
providerId.ToString("B"));
// Ignore any error when stopping a provider, information already logged inside RunCommand
RunCommand("logman", args);
}
/// <summary>
/// Executes a command-line application and returns its exit code.
/// </summary>
/// <param name="fileName">
/// Name of the application to be executed.
/// </param>
/// <param name="arguments">
/// Command-line arguments to be passed to the application.
/// </param>
/// <returns>
/// Exit code returned by the application.
/// </returns>
[SuppressMessage("Microsoft.Security", "CA2122:DoNotIndirectlyExposeMethodsWithLinkDemands", Justification = "Not accessible to any 3rd-party MS or not")]
private static int RunCommand(string fileName, string arguments)
{
const string MethodName = "RunCommand";
Logger.Log(
LoggerLevel.Info,
LogId,
MethodName,
"[{0} {1}]",
fileName,
arguments);
using (var p = new Process())
{
p.StartInfo.UseShellExecute = false;
p.StartInfo.RedirectStandardOutput = true;
p.StartInfo.FileName = fileName;
p.StartInfo.Arguments = arguments;
p.StartInfo.CreateNoWindow = true;
p.Start();
while (!p.StandardOutput.EndOfStream)
{
string line = p.StandardOutput.ReadLine();
Logger.Log(LoggerLevel.Info, LogId, MethodName, "\t\t{0}", line);
}
var win32Exception = new Win32Exception(p.ExitCode);
Logger.Log(
LoggerLevel.Info,
LogId,
MethodName,
"exitCode=0x{0}: {1}",
p.ExitCode.ToString("X8"),
win32Exception.Message);
return p.ExitCode;
}
}
/// <summary>
/// Performs an IO operation catching all IO, security and unauthorized access
/// exceptions.
/// </summary>
/// <param name="ioOperation">
/// IO operation to be performed.
/// </param>
/// <param name="failureAction">
/// Action to be executed in case an exception is raised by the IO operation.
        /// Note that this action takes the raised exception as a parameter.
/// </param>
/// <returns>
/// True if the IO operation did not raise any exception, false otherwise.
/// </returns>
private static bool ProtectedIO(Action ioOperation, Action<Exception> failureAction)
{
Exception exception = null;
try
{
ioOperation();
}
catch (IOException e)
{
exception = e;
}
catch (SecurityException e)
{
exception = e;
}
catch (UnauthorizedAccessException e)
{
exception = e;
}
if (exception != null)
{
failureAction(exception);
}
return exception == null;
}
/// <summary>
/// Builds the name of the next ETL file.
/// </summary>
/// <returns>
/// The name for the next ETL for the session.
/// </returns>
private string GenerateNextSessionFileName()
{
// ATTENTION: during rotation of ETL files the deletion code assumes that if the ETL files
// are sorted according to their names (ordinal and ignore case) the older ones are going
// to be the first ones. Be careful if changing the resulting file name.
var dateTime = DateTime.UtcNow;
var nextSessionFileName = string.Format(
CultureInfo.CurrentCulture,
"{0}_{1:0000}-{2:00}-{3:00}_{4:00}-{5:00}-{6:00}_utc.etl",
this.etlBaseName,
dateTime.Year,
dateTime.Month,
dateTime.Day,
dateTime.Hour,
dateTime.Minute,
dateTime.Second);
return Path.Combine(this.EtlLogsDirectory, nextSessionFileName);
}
/// <summary>
/// Deletes older session files associated to the collector.
/// </summary>
/// <remarks>
        /// For this method to work correctly, sorting by file name must produce the same
        /// sequence as sorting the files by their creation date.
/// </remarks>
private void DeleteOlderSessionFiles()
{
if (Directory.Exists(this.EtlLogsDirectory))
{
List<string> existingFiles = this.GetExistingEtlFiles();
var totalFilesToDelete = existingFiles.Count - this.maxFileCount;
if (totalFilesToDelete > 0)
{
// ATTENTION: Sorting file names should give the older files in front of the list
existingFiles.Sort(StringComparer.OrdinalIgnoreCase);
for (int i = 0; i < totalFilesToDelete; ++i)
{
var existingFile = existingFiles[i];
ProtectedIO(
() => File.Delete(existingFile),
e => Logger.Log(
LoggerLevel.Warning,
LogId,
"DeleteOlderSessionFiles",
"Failed to delete ETL file [{0}]. Exception: {1}",
existingFile,
e.Message));
}
}
}
}
/// <summary>
/// Gets the ETL files associated to this collector.
/// </summary>
/// <returns>
/// The list of ETL files generated by the collector so far.
/// </returns>
private List<string> GetExistingEtlFiles()
{
List<string> existingFiles = null;
var searchPattern = this.etlBaseName + "_*.etl";
// ReSharper disable ImplicitlyCapturedClosure
if (!ProtectedIO(
() => existingFiles = Directory.GetFiles(this.EtlLogsDirectory, searchPattern).ToList(),
e =>
Logger.Log(
LoggerLevel.Warning,
LogId,
"GetExistingEtlFiles",
"Failed to enumerate files at [{0}] with mask [{1}] for collector [{2}]. Exception: {3}",
this.EtlLogsDirectory,
searchPattern,
this.Name,
e.Message)))
{
return new List<string>();
}
// ReSharper restore ImplicitlyCapturedClosure
return existingFiles;
}
/// <summary>
/// Gets the size of the current ETL file, in kilobytes, being written by the collector.
/// </summary>
/// <returns>
/// The current size of the ETL file being written by the collector.
/// </returns>
private uint GetCurrentFileSize()
{
if (!this.isFileCollector)
{
throw new InvalidOperationException("Tried to obtain file size for the non-file collector [" + this.Name + "]");
}
throw new NotImplementedException();
}
}
}
<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="DistinctCountConfiguration.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Configuration
{
using System;
using System.Collections.Generic;
using System.Linq;
using Newtonsoft.Json;
/// <summary>
    /// Configures the distinct count feature on this preaggregate.
/// </summary>
public sealed class DistinctCountConfiguration : IDistinctCountConfiguration
{
private readonly List<string> dimensions;
/// <summary>
/// Initializes a new instance of the <see cref="DistinctCountConfiguration"/> class.
/// </summary>
public DistinctCountConfiguration()
{
this.dimensions = new List<string>();
}
/// <summary>
/// Initializes a new instance of the <see cref="DistinctCountConfiguration"/> class.
/// </summary>
/// <param name="dimensions">The dimensions.</param>
[JsonConstructor]
internal DistinctCountConfiguration(IEnumerable<string> dimensions)
{
this.dimensions = dimensions?.ToList() ?? new List<string>();
}
/// <summary>
/// Gets the dimensions.
/// </summary>
public IEnumerable<string> Dimensions
{
get { return this.dimensions; }
}
/// <summary>
/// Adds the dimension.
/// </summary>
/// <param name="dimensionToAdd">The dimension to add.</param>
public void AddDimension(string dimensionToAdd)
{
if (string.IsNullOrWhiteSpace(dimensionToAdd))
{
throw new ArgumentNullException(nameof(dimensionToAdd));
}
if (this.dimensions.Count == 0)
{
this.dimensions.Add(dimensionToAdd);
return;
}
for (var i = 0; i < this.dimensions.Count; ++i)
{
var comparison = string.Compare(this.dimensions[i], dimensionToAdd, StringComparison.OrdinalIgnoreCase);
if (comparison == 0)
{
throw new ConfigurationValidationException("Cannot add duplicate dimensions.", ValidationType.DuplicateDimension);
}
if (comparison > 0)
{
this.dimensions.Insert(i, dimensionToAdd);
return;
}
if ((i + 1) == this.dimensions.Count)
{
this.dimensions.Add(dimensionToAdd);
return;
}
}
}
/// <summary>
/// Removes the dimension.
/// </summary>
/// <param name="dimension">The dimension.</param>
public void RemoveDimension(string dimension)
{
this.dimensions.RemoveAll(x => string.Equals(x, dimension, StringComparison.OrdinalIgnoreCase));
}
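        // Illustrative sketch, not part of the original source: dimensions are kept sorted case-insensitively
        // and duplicates are rejected. The dimension names below are hypothetical.
        //   var distinctCount = new DistinctCountConfiguration();
        //   distinctCount.AddDimension("Region");
        //   distinctCount.AddDimension("Datacenter");
        //   // distinctCount.Dimensions now enumerates as "Datacenter", "Region".
        //   distinctCount.RemoveDimension("REGION");   // case-insensitive; leaves only "Datacenter".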
}
}
<file_sep>// -----------------------------------------------------------------------
// <copyright file="ProviderConfiguration.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// -----------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Metrics.Etw
{
using System;
/// <summary>
/// Type that reads the settings of a single provider from a configuration file.
/// </summary>
/// <remarks>
/// The value of a provider key is expected to be in the following format:
    /// <para/>&lt;GUID&gt;[,&lt;parameterPair&gt;]*<para/>
/// In which a parameterPair has the following format "parameterName:parameterValue".
/// Below is a list of some valid provider configurations with respective keys:
/// <para/>
/// Provider1 = {D857C50C-9002-4852-94A4-7264063CF38D}
/// Provider2 = {9FD91669-452C-4B25-AD5B-5322D511DA65},level:Informational,KeywordsAny:0x1f0
/// Provider3 = {B7F33BAA-E45A-4FCF-8389-FA103A2AC23C},KeywordsAll:0x5
/// <para>
    /// None of the parameters is mandatory; a default value is provided for any parameter that is not
    /// specified in the configuration.
/// </para>
/// </remarks>
internal sealed class ProviderConfiguration
{
/// <summary>
/// Initializes a new instance of the <see cref="ProviderConfiguration"/> class with the specific
/// values given by the caller.
/// </summary>
/// <param name="id">
/// The id that identifies the provider.
/// </param>
/// <param name="level">
/// The logging level from which events should be logged.
/// </param>
/// <param name="keywordsAny">
/// The "keywords any" value to be used when enabling this provider.
/// </param>
/// <param name="keywordsAll">
/// The "keywords all" value to be used when enabling this provider.
/// </param>
public ProviderConfiguration(Guid id, EtwTraceLevel level, long keywordsAny, long keywordsAll)
{
this.Id = id;
this.Level = level;
this.KeywordsAny = keywordsAny;
this.KeywordsAll = keywordsAll;
}
/// <summary>
/// Initializes a new instance of the <see cref="ProviderConfiguration"/> class.
/// </summary>
private ProviderConfiguration()
{
}
/// <summary>
/// Gets the unique id of the provider.
/// </summary>
public Guid Id { get; private set; }
/// <summary>
/// Gets the level from which events should be logged.
/// </summary>
public EtwTraceLevel Level { get; private set; }
/// <summary>
/// Gets the match any keyword value to be used when enabling this provider.
/// </summary>
/// <see href="http://msdn.microsoft.com/en-us/library/windows/desktop/dd392305(v=vs.85).aspx"/>
public long KeywordsAny { get; private set; }
/// <summary>
/// Gets the match all keyword value to be used when enabling this provider.
/// </summary>
/// <see href="http://msdn.microsoft.com/en-us/library/windows/desktop/dd392305(v=vs.85).aspx"/>
public long KeywordsAll { get; private set; }
/// <summary>
/// Overriding ToString method to help tests and debugging.
/// </summary>
/// <returns>
/// The <see cref="string"/> representing the provider configuration.
/// </returns>
public override string ToString()
{
return string.Format(
"Id: {0} Level: {1} KeywordsAny: 0x{2} KeywordsAll: 0x{3}",
this.Id.ToString("B"),
this.Level.ToString(),
this.KeywordsAny.ToString("X16"),
this.KeywordsAll.ToString("X16"));
}
/// <summary>
/// Creates a new instance cloned from the current one.
/// </summary>
/// <returns>
/// The new <see cref="ProviderConfiguration"/> instance cloned from the current
/// instance.
/// </returns>
public ProviderConfiguration Clone()
{
var clone = new ProviderConfiguration();
clone.Id = this.Id;
clone.Level = this.Level;
clone.KeywordsAny = this.KeywordsAny;
clone.KeywordsAll = this.KeywordsAll;
return clone;
}
/// <summary>
        /// Merges this provider configuration with another instance when the provider
        /// is being enabled.
/// </summary>
/// <param name="otherConfiguration">
/// The other configuration instance to be merged for enable.
/// </param>
public void MergeForEnable(ProviderConfiguration otherConfiguration)
{
if (this.Id == otherConfiguration.Id)
{
this.Level = this.Level > otherConfiguration.Level ? this.Level : otherConfiguration.Level;
this.KeywordsAny |= otherConfiguration.KeywordsAny;
this.KeywordsAll &= otherConfiguration.KeywordsAll;
}
}
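// Illustrative sketch (not part of the original source): merging two configurations for the
// same provider GUID. KeywordsAny values are OR-ed together, KeywordsAll values are AND-ed,
// and the higher (more verbose) level wins. The GUID and keyword values below are hypothetical;
// the EtwTraceLevel member name "Informational" is assumed from the remarks above.
//
//   var id = new Guid("9FD91669-452C-4B25-AD5B-5322D511DA65");
//   var a = new ProviderConfiguration(id, EtwTraceLevel.Informational, 0x0F0, 0xFF);
//   var b = new ProviderConfiguration(id, EtwTraceLevel.Informational, 0x10F, 0x0F);
//   a.MergeForEnable(b);
//   // a.KeywordsAny == 0x1FF, a.KeywordsAll == 0x0F, a.Level stays Informational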
}
}<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="IQueryResultListV3.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Query
{
using System;
using System.Collections.Generic;
/// <summary>
/// Holds the list of query results and the associated metadata.
/// </summary>
public interface IQueryResultListV3
{
/// <summary>
/// Gets the end time in UTC for the query results.
/// </summary>
DateTime EndTimeUtc { get; }
/// <summary>
/// Gets the start time in UTC for the query results.
/// </summary>
DateTime StartTimeUtc { get; }
/// <summary>
/// Gets the time resolution in minutes for the query results.
/// </summary>
int TimeResolutionInMinutes { get; }
/// <summary>
/// Gets the query results. Each result represents a single time series whose start time, end time, and time resolution
/// are given by the members of this object.
/// </summary>
IReadOnlyList<IQueryResultV3> Results { get; }
}
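// Illustrative sketch (not part of the original source): reconstructing the UTC timestamp of
// each datapoint in a series from the list-level metadata. The members of IQueryResultV3 are
// not visible in this file, so reading the per-series values is only indicated; the end time
// is assumed to be exclusive here.
//
//   foreach (IQueryResultV3 series in resultList.Results)
//   {
//       for (int i = 0; ; i++)
//       {
//           DateTime timestampUtc = resultList.StartTimeUtc.AddMinutes((long)i * resultList.TimeResolutionInMinutes);
//           if (timestampUtc >= resultList.EndTimeUtc) { break; }
//           // read the i-th value of 'series' here
//       }
//   }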
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="CertificateHelper.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Utility
{
using System;
using System.Linq;
using System.Security;
using System.Security.Cryptography;
using System.Security.Cryptography.X509Certificates;
/// <summary>
/// The certificate helper class
/// </summary>
internal class CertificateHelper
{
/// <summary>
/// Finds and validates certificate.
/// </summary>
/// <param name="certificateThumbprint">The certificate thumbprint.</param>
/// <param name="certificateStoreLocation">The certificate store location.</param>
/// <returns>The certificate if validated.</returns>
internal static X509Certificate2 FindAndValidateCertificate(string certificateThumbprint, StoreLocation certificateStoreLocation)
{
X509Certificate2 cert = FindX509Certificate(certificateThumbprint, certificateStoreLocation);
if (!cert.HasPrivateKey)
{
throw new MetricsClientException(string.Format("Cert with Thumbprint [{0}] doesn't have a private key", cert.Thumbprint));
}
// Check expire and effective date
DateTime now = DateTime.Now;
if (cert.NotBefore > now)
{
throw new MetricsClientException(string.Format("The certificate is not valid until {0}.", cert.GetEffectiveDateString()));
}
if (cert.NotAfter < now)
{
throw new MetricsClientException(string.Format("The certificate is not valid after {0}.", cert.GetExpirationDateString()));
}
try
{
if (cert.PrivateKey == null)
{
throw new MetricsClientException("The certificate has a private key but the PrivateKey property is null, and it is typically due to a permission issue.");
}
}
catch (CryptographicException)
{
throw new MetricsClientException("The certificate has a private key but the PrivateKey property is null, and it is typically due to a permission issue.");
}
return cert;
}
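// Illustrative sketch (not part of the original source): looking up a client certificate by
// thumbprint from the current user's store. The thumbprint below is a placeholder; a
// MetricsClientException is thrown when the certificate is missing, expired, not yet valid,
// or has no accessible private key.
//
//   X509Certificate2 cert = CertificateHelper.FindAndValidateCertificate(
//       "0123456789ABCDEF0123456789ABCDEF01234567",
//       StoreLocation.CurrentUser);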
/// <summary>
/// Finds the X509 certificate.
/// </summary>
/// <param name="thumbprint">The thumbprint.</param>
/// <param name="storeLocation">The store location.</param>
/// <returns>
/// The <see cref="X509Certificate2" /> certificate if found.
/// </returns>
private static X509Certificate2 FindX509Certificate(string thumbprint, StoreLocation storeLocation)
{
var store = new X509Store(StoreName.My, storeLocation);
store.Open(OpenFlags.ReadOnly | OpenFlags.OpenExistingOnly);
var certificates = store.Certificates.Find(X509FindType.FindByThumbprint, thumbprint, false);
if (certificates.Count == 0)
{
throw new MetricsClientException(
string.Format("No cert with Thumbprint [{0}] is found in the [{1}] store", thumbprint, storeLocation));
}
var cert = certificates.OfType<X509Certificate2>().FirstOrDefault(c => c.HasPrivateKey);
if (cert == null)
{
throw new MetricsClientException(string.Format("No cert with Thumbprint [{0}] has a private key", thumbprint));
}
return cert;
}
}
}
<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="MinMaxConfiguration.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Configuration
{
using Newtonsoft.Json;
/// <summary>
/// Determines whether the minimum and maximum sampling types are available for this preaggregate.
/// </summary>
public sealed class MinMaxConfiguration : IMinMaxConfiguration
{
/// <summary>
/// MinMaxConfiguration where minimum and maximum sampling types are enabled.
/// </summary>
public static readonly MinMaxConfiguration MinMaxEnabled = new MinMaxConfiguration(true);
/// <summary>
/// MinMaxConfiguration where minimum and maximum sampling types are disabled.
/// </summary>
public static readonly MinMaxConfiguration MinMaxDisabled = new MinMaxConfiguration(false);
/// <summary>
/// Initializes a new instance of the <see cref="MinMaxConfiguration"/> class.
/// </summary>
/// <param name="enabled">Whether or not the feature is enabled.</param>
[JsonConstructor]
internal MinMaxConfiguration(bool enabled)
{
this.Enabled = enabled;
}
/// <summary>
/// Gets a value indicating whether the minimum and maximum sampling types are enabled.
/// </summary>
public bool Enabled { get; }
}
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="HyperLogLogSketch.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Online.Metrics.Serialization
{
using global::Metrics.Services.Common;
/// <summary>
/// Represents a hyperloglog sketch.
/// </summary>
public class HyperLogLogSketch : IPoolTrackable
{
/// <summary>
/// Maximum value of B.
/// </summary>
public const int MaxBValue = 14;
/// <summary>
/// Default value of B.
/// </summary>
public const int DefaultBValue = 10;
/// <summary>
/// The b value.
/// </summary>
private readonly byte bValue;
/// <summary>
/// The registers.
/// </summary>
private byte[] registers;
/// <summary>
/// Initializes a new instance of the <see cref="HyperLogLogSketch"/> class.
/// </summary>
/// <param name="bValue">HyperLogLog B value</param>
/// <param name="initializeRegisters">Flag to indicate whether to initialize registers.</param>
public HyperLogLogSketch(int bValue, bool initializeRegisters = true)
{
if (initializeRegisters)
{
this.registers = new byte[1 << bValue];
}
this.bValue = (byte)bValue;
}
/// <summary>
/// Gets the B value for sketch.
/// </summary>
public byte BValue => this.bValue;
/// <summary>
/// Gets the registers associated with the sketch.
/// </summary>
public byte[] Registers
{
get
{
return this.registers;
}
protected set
{
this.registers = value;
}
}
/// <inheritdoc />
PoolObjectTrackingInfo IPoolTrackable.PoolObjectTrackingInfo { get; set; }
/// <summary>
/// Gets or sets the register value for the given key.
/// </summary>
/// <param name="key">Index value.</param>
/// <returns>
/// Register value at given key.
/// </returns>
public byte this[int key]
{
get
{
return this.registers[key];
}
set
{
this.registers[key] = value;
}
}
/// <summary>
/// Resets the sketch by zeroing all registers.
/// </summary>
public void Reset()
{
for (var i = 0; i < this.registers.Length; i++)
{
this.registers[i] = 0;
}
}
/// <summary>
/// Aggregates the given sketch to this sketch.
/// </summary>
/// <param name="other">Other sketch.</param>
public void Aggregate(HyperLogLogSketch other)
{
if (other.BValue != this.bValue)
{
// We do not support aggregation of non-aligned buffers.
return;
}
for (var i = 0; i < this.registers.Length; i++)
{
if (this.registers[i] < other.registers[i])
{
this.registers[i] = other.registers[i];
}
}
}
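// Illustrative sketch (not part of the original source): merging two sketches that share the
// same B value. Aggregation keeps the per-register maximum, which is the standard HyperLogLog
// union; sketches with a different B value are silently ignored by Aggregate.
//
//   var left = new HyperLogLogSketch(HyperLogLogSketch.DefaultBValue);
//   var right = new HyperLogLogSketch(HyperLogLogSketch.DefaultBValue);
//   left[3] = 2;
//   right[3] = 5;
//   left.Aggregate(right);   // left[3] is now 5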
}
}
<file_sep>// -----------------------------------------------------------------------
// <copyright file="MonitorConfigurationManager.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// -----------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Monitors
{
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Threading.Tasks;
using Configuration;
using Logging;
using Metrics;
using Newtonsoft.Json;
using Newtonsoft.Json.Serialization;
using Utility;
/// <summary>
/// Monitor configuration manager.
/// </summary>
public sealed class MonitorConfigurationManager : IMonitorConfigurationManager
{
private static readonly object LogId = Logger.CreateCustomLogId(nameof(MonitorConfigurationManager));
private static readonly string LogTag = nameof(MonitorConfigurationManager);
private readonly ConnectionInfo connectionInfo;
private readonly HttpClient httpClient;
private readonly string metricUrlPrefix;
private readonly IMetricReader metricReader;
private readonly JsonSerializerSettings serializerSettings = new JsonSerializerSettings
{
TypeNameHandling = TypeNameHandling.Auto,
ContractResolver = new DefaultContractResolver
{
NamingStrategy = new CamelCaseNamingStrategy
{
ProcessDictionaryKeys = false
}
}
};
/// <summary>
/// Initializes a new instance of the <see cref="MonitorConfigurationManager" /> class.
/// </summary>
/// <param name="connectionInfo">The connection information for the endpoint being used.</param>
public MonitorConfigurationManager(ConnectionInfo connectionInfo)
: this(connectionInfo, HttpClientHelper.CreateHttpClientWithAuthInfo(connectionInfo), new MetricReader(connectionInfo))
{
}
/// <summary>
/// Initializes a new instance of the <see cref="MonitorConfigurationManager" /> class.
/// </summary>
/// <param name="connectionInfo">The connection information for the endpoint being used.</param>
/// <param name="httpClient">The HTTP client.</param>
/// <param name="metricReader">The metric reader.</param>
internal MonitorConfigurationManager(ConnectionInfo connectionInfo, HttpClient httpClient, IMetricReader metricReader)
{
this.connectionInfo = connectionInfo;
this.metricUrlPrefix = this.connectionInfo.GetAuthRelativeUrl(MetricsServerRelativeUris.ConfigRelativeUrl);
this.httpClient = httpClient;
this.metricReader = metricReader;
}
/// <summary>
/// Gets or sets the maximum parallel running tasks.
/// </summary>
public int MaxParallelRunningTasks { get; set; } = 20;
/// <inheritdoc />
public async Task<ConfigurationUpdateResultList> SyncConfigurationAsync(
IMonitoringAccount monitoringAccount,
string metricNamespace,
string metricName,
bool skipVersionCheck = false,
bool validate = true)
{
if (monitoringAccount == null)
{
throw new ArgumentNullException(nameof(monitoringAccount));
}
if (metricNamespace == null)
{
throw new ArgumentNullException(nameof(metricNamespace));
}
if (metricName == null)
{
throw new ArgumentNullException(nameof(metricName));
}
var operation = $"{this.metricUrlPrefix}/replicateMonitorConfigurations";
var path =
$"{operation}/monitoringAccount/{monitoringAccount.Name}/metricNamespace/{metricNamespace}/metricName/{metricName}/skipVersionCheck/{skipVersionCheck}/operation/Replace";
var uriBuilder = new UriBuilder(this.connectionInfo.GetGlobalEndpoint())
{
Path = path,
Query = $"validate={validate}"
};
var result = new ConfigurationUpdateResultList
{
MonitoringAccount = monitoringAccount.Name,
MetricNamespace = metricNamespace,
MetricName = metricName
};
try
{
if (monitoringAccount.MirrorMonitoringAccountList == null || !monitoringAccount.MirrorMonitoringAccountList.Any())
{
throw new Exception("MirrorAccountsList can't be null or empty while replicating monitors.");
}
var serializedTargetAccounts = JsonConvert.SerializeObject(monitoringAccount.MirrorMonitoringAccountList.ToList(), Formatting.Indented, this.serializerSettings);
var response = await HttpClientHelper.GetResponse(
uriBuilder.Uri,
HttpMethod.Post,
this.httpClient,
monitoringAccount.Name,
operation,
serializedContent: serializedTargetAccounts).ConfigureAwait(false);
result.ConfigurationUpdateResults =
JsonConvert.DeserializeObject<ConfigurationUpdateResult[]>(
response.Item1,
this.serializerSettings);
foreach (var updateResult in result.ConfigurationUpdateResults)
{
if (!updateResult.Success)
{
result.Success = false;
result.ExceptionMessage = updateResult.Message;
return result;
}
}
result.Success = true;
return result;
}
catch (MetricsClientException mce)
{
result.Success = false;
if (mce.ResponseStatusCode == HttpStatusCode.Unauthorized || mce.ResponseStatusCode == HttpStatusCode.Forbidden)
{
var exMsg =
$"Unable to sync configuration for monitoringAccount:{monitoringAccount.Name}, metricNamespace:{metricNamespace}, metricName:{metricName}"
+ $"doesn't have permission to update configurations in mirror accounts. Response:{mce.Message}";
throw new ConfigurationValidationException(exMsg, ValidationType.ServerSide, mce);
}
result.ExceptionMessage = mce.Message;
return result;
}
}
/// <inheritdoc />
public async Task<IReadOnlyList<ConfigurationUpdateResultList>> SyncConfigurationAsync(
IMonitoringAccount monitoringAccount,
string metricNamespace,
bool skipVersionCheck = false,
bool validate = true)
{
if (monitoringAccount == null)
{
throw new ArgumentNullException(nameof(monitoringAccount));
}
if (string.IsNullOrEmpty(metricNamespace))
{
throw new ArgumentNullException(nameof(metricNamespace));
}
var metricNames = await this.metricReader.GetMetricNamesAsync(
monitoringAccount.Name,
metricNamespace).ConfigureAwait(false);
var taskList = new List<Task<ConfigurationUpdateResultList>>(this.MaxParallelRunningTasks);
List<ConfigurationUpdateResultList> results = new List<ConfigurationUpdateResultList>();
foreach (var metricName in metricNames)
{
if (taskList.Count == this.MaxParallelRunningTasks)
{
await this.WaitForSync(taskList, results).ConfigureAwait(false);
taskList.Clear();
}
// Queue the current metric after flushing a full batch so that every metric gets a sync task.
taskList.Add(this.SyncConfigurationAsync(monitoringAccount, metricNamespace, metricName, skipVersionCheck, validate));
}
if (taskList.Count > 0)
{
await this.WaitForSync(taskList, results).ConfigureAwait(false);
taskList.Clear();
}
return results;
}
/// <inheritdoc />
public async Task<IReadOnlyList<ConfigurationUpdateResultList>> SyncConfigurationAsync(
IMonitoringAccount monitoringAccount,
bool skipVersionCheck = false,
bool validate = true)
{
if (monitoringAccount == null)
{
throw new ArgumentNullException(nameof(monitoringAccount));
}
var namespaces =
await this.metricReader.GetNamespacesAsync(monitoringAccount.Name).ConfigureAwait(false);
List<ConfigurationUpdateResultList> results = new List<ConfigurationUpdateResultList>();
foreach (var ns in namespaces)
{
var namespaceResults = await this.SyncConfigurationAsync(
monitoringAccount,
ns,
skipVersionCheck,
validate).ConfigureAwait(false);
// For the QOS namespace or other internal namespaces, there is no configuration to
// replicate and thus the namespace result list is empty.
if (namespaceResults.Count > 0)
{
results.AddRange(namespaceResults);
}
}
return results;
}
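// Illustrative sketch (not part of the original source): replicating the monitor configurations
// of a single metric to the account's mirror accounts. The ConnectionInfo construction and the
// way the IMonitoringAccount instance is obtained are intentionally left open, since they depend
// on the environment; the namespace and metric names below are placeholders.
//
//   IMonitoringAccount monitoringAccount = ...;   // fetched elsewhere, MirrorMonitoringAccountList populated
//   ConnectionInfo connection = ...;              // endpoint/auth details depend on the environment
//   var manager = new MonitorConfigurationManager(connection);
//   ConfigurationUpdateResultList result = await manager.SyncConfigurationAsync(
//       monitoringAccount, "MyNamespace", "MyMetric");
//   if (!result.Success)
//   {
//       Console.WriteLine(result.ExceptionMessage);
//   }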
/// <summary>
/// A helper method that waits for all of the given sync tasks to complete.
/// </summary>
/// <param name="taskList">The task list.</param>
/// <param name="results">The results.</param>
/// <returns>An awaitable <see cref="Task" />.</returns>
private async Task WaitForSync(
List<Task<ConfigurationUpdateResultList>> taskList,
List<ConfigurationUpdateResultList> results)
{
try
{
await Task.WhenAll(taskList).ConfigureAwait(false);
foreach (var task in taskList)
{
if (task.Result.Success)
{
if (task.Result.ConfigurationUpdateResults.Count > 0)
{
results.Add(task.Result);
}
}
else
{
if (task.Result.ExceptionMessage.Contains(
"Monitor configuration to be updated can't be null."))
{
// No configuration exists for the specified metric and hence there is nothing
// to replicate.
continue;
}
results.Add(task.Result);
}
}
}
catch (Exception ex)
{
Logger.Log(
LoggerLevel.Error,
LogId,
LogTag,
$"Exception occurred while replicating configuration. Exception: {ex}");
}
}
}
}<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="ILocalRawMetric.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Metrics
{
using System;
using System.Collections.Generic;
/// <summary>
/// An interface representing data for one raw metric.
/// </summary>
public interface ILocalRawMetric
{
/// <summary>
/// Gets the monitoring account to which this metric is reported.
/// </summary>
string MonitoringAccount { get; }
/// <summary>
/// Gets the metric namespace.
/// </summary>
string MetricNamespace { get; }
/// <summary>
/// Gets the metric name.
/// </summary>
string MetricName { get; }
/// <summary>
/// Gets the metric timestamp.
/// </summary>
DateTime MetricTimeUtc { get; }
/// <summary>
/// Gets the metric dimensions.
/// </summary>
IDictionary<string, string> Dimensions { get; }
/// <summary>
/// Gets a value indicating whether the metric is a platform metric.
/// In that case its value should be read from the <see cref="MetricDoubleValue" /> property.
/// </summary>
bool IsPlatformMetric { get; }
/// <summary>
/// Gets the metric value emitted using metric API.
/// </summary>
ulong MetricLongValue { get; }
/// <summary>
/// Gets the value of the platform counters.
/// </summary>
double MetricDoubleValue { get; }
}
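// Illustrative sketch (not part of the original source): choosing the correct value property
// based on IsPlatformMetric, as described by the doc comments above.
//
//   double GetValue(ILocalRawMetric metric) =>
//       metric.IsPlatformMetric ? metric.MetricDoubleValue : metric.MetricLongValue;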
}<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="IEtlSubscriber.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// <summary>
// Defines a subscriber that receives ETL files as they are periodically cut and processes each one of them.
// </summary>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Metrics.Etw
{
using System;
/// <summary>
/// Enumeration with the possible status at the end of the processing of a single ETL file.
/// </summary>
internal enum EtlCompletionStatus
{
/// <summary>
/// ETL file was successfully processed.
/// </summary>
Success,
/// <summary>
/// Cancellation was requested while processing the ETL file.
/// </summary>
CancellationRequested,
/// <summary>
/// The ETL file could not be opened.
/// </summary>
FailureToOpen,
/// <summary>
/// An exception happened while processing the ETL file.
/// </summary>
ErrorWhileProcessing,
}
/// <summary>
/// Defines a subscriber that receives ETL files as they are periodically cut and processes each one of them.
/// </summary>
/// <remarks>
/// The subscriber is going to receive the raw callbacks provided by ETW to process ETW sessions
/// (RecordCallback and BufferCallback).
/// </remarks>
internal unsafe interface IEtlSubscriber
{
/// <summary>
/// Gets the configuration needed to processes backlog of ETL files.
/// </summary>
EtlBacklogConfig EtlBacklogConfig { get; }
/// <summary>
/// Gets the minimum interval that the dispatcher should wait between dispatching files to the
/// subscriber.
/// </summary>
TimeSpan MinimumIntervalBetweenEtlFiles { get; }
/// <summary>
/// Called when the processing of an ETL file is about to start.
/// </summary>
/// <param name="etlFileName">
/// Name of the ETL file that is about to be processed.
/// </param>
/// <remarks>
/// The subscriber processes a single ETL file at a time; calls to the ETW callbacks
/// (see below) start only after this method has been called for that ETL file.
/// </remarks>
void Start(string etlFileName);
/// <summary>
/// Receives the ETW callback passing the ETW events from the ETL file currently being processed,
/// see EventRecordCallback on MSDN for more information.
/// </summary>
/// <param name="eventRecord">
/// Pointer to the event record received from the ETW session.
/// </param>
void RecordCallback(NativeMethods.EventRecord* eventRecord);
/// <summary>
/// Receives the ETW callback passing the ETW buffers from the ETL file currently being processed,
/// see EventTraceBufferCallback on MSDN for more information.
/// </summary>
/// <param name="eventTraceLog">
/// Pointer to the event trace log structure. Because of the nature of this structure (variable size,
/// null-terminated strings) it is easier to perform any required marshaling in the calling code.
/// </param>
/// <returns>
/// True if the processing of the trace should continue, false to stop processing the trace.
/// </returns>
bool BufferCallback(IntPtr eventTraceLog);
/// <summary>
/// Notifies the subscriber that processing of the given ETL file is complete and
/// whether processing stopped because of cancellation.
/// </summary>
/// <param name="etlFileName">
/// Name of the ETL file that finished being processed.
/// </param>
/// <param name="completionStatus">
/// Enumeration that gives information about the completion status, i.e.: error, success, etc.
/// </param>
/// <param name="exception">
/// In case of an error during processing, this contains the captured exception.
/// Use it in combination with the completion status to fully diagnose what happened.
/// </param>
void End(string etlFileName, EtlCompletionStatus completionStatus, Exception exception);
}
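// Illustrative sketch (not part of the original source): the shape of a minimal subscriber.
// The dispatcher calls Start, then RecordCallback/BufferCallback for the file's events and
// buffers, and finally End with the completion status. EtlBacklogConfig is assumed to be
// supplied elsewhere.
//
//   internal unsafe sealed class CountingEtlSubscriber : IEtlSubscriber
//   {
//       public EtlBacklogConfig EtlBacklogConfig { get; }
//       public TimeSpan MinimumIntervalBetweenEtlFiles => TimeSpan.Zero;
//       private long eventCount;
//       public void Start(string etlFileName) => this.eventCount = 0;
//       public void RecordCallback(NativeMethods.EventRecord* eventRecord) => this.eventCount++;
//       public bool BufferCallback(IntPtr eventTraceLog) => true;   // keep processing the trace
//       public void End(string etlFileName, EtlCompletionStatus completionStatus, Exception exception) { }
//   }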
}<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="ConfigFileValidator.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Utility
{
using System.Linq;
using Microsoft.Cloud.Metrics.Client.Configuration;
using Microsoft.Cloud.Metrics.Client.Logging;
/// <summary>
/// The validator class for configuration files.
/// </summary>
internal static class ConfigFileValidator
{
private static readonly object LogId = Logger.CreateCustomLogId("ConfigFileValidator");
/// <summary>
/// Validates the raw metric configuration from file to see if they contain the expected configuration for the command in question.
/// </summary>
/// <param name="metricConfigFromFile">The metric configuration from file.</param>
/// <returns>True if passing validation; false otherwise.</returns>
internal static bool ValidateRawMetricConfigFromFile(IRawMetricConfiguration metricConfigFromFile)
{
if (metricConfigFromFile.Preaggregations == null || !metricConfigFromFile.Preaggregations.Any())
{
Logger.Log(LoggerLevel.Error, LogId, "ValidateRawMetricConfigFromFile", "Preaggregations property is not set or empty so it seems to be an invalid raw metric config.");
return false;
}
return true;
}
/// <summary>
/// Validates the composite metric configuration from file to see if they contain the expected configuration for the command in question.
/// </summary>
/// <param name="metricConfigFromFile">The metric configuration from file.</param>
/// <returns>True if passing validation; false otherwise.</returns>
internal static bool ValidateCompositeMetricConfigFromFile(ICompositeMetricConfiguration metricConfigFromFile)
{
if (metricConfigFromFile.MetricSources == null || !metricConfigFromFile.MetricSources.Any())
{
Logger.Log(LoggerLevel.Error, LogId, "ValidateCompositeMetricConfigFromFile", "MetricSources property is not set or empty so it seems to be an invalid composite metric config.");
return false;
}
return true;
}
/// <summary>
/// Validates the metric configuration from file to see if they contain the expected configuration contents.
/// </summary>
/// <param name="metricConfigFromFile">The metric configuration from file.</param>
/// <returns>True if passing validation; false otherwise.</returns>
internal static bool ValidateMetricConfigFromFile(IMetricConfiguration metricConfigFromFile)
{
return metricConfigFromFile is RawMetricConfiguration
? ValidateRawMetricConfigFromFile((RawMetricConfiguration)metricConfigFromFile)
: ValidateCompositeMetricConfigFromFile((CompositeMetricConfiguration)metricConfigFromFile);
}
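// Illustrative sketch (not part of the original source): the validators are intended as a quick
// sanity check after deserializing a configuration file, before applying it. LoadConfigFromFile
// is a hypothetical deserialization helper.
//
//   IMetricConfiguration config = LoadConfigFromFile(path);
//   if (!ConfigFileValidator.ValidateMetricConfigFromFile(config))
//   {
//       return;   // the file does not look like a valid raw or composite metric config
//   }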
}
}<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="MonitorIdentifier.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Monitors
{
using System;
using Microsoft.Online.Metrics.Serialization.Configuration;
using Newtonsoft.Json;
/// <summary>
/// A class representing a monitor identifier.
/// </summary>
public struct MonitorIdentifier : IEquatable<MonitorIdentifier>
{
/// <summary>
/// The metric identifier.
/// </summary>
private readonly MetricIdentifier metricIdentifier;
/// <summary>
/// The monitor ID as in the monitor configuration.
/// </summary>
private readonly string monitorId;
/// <summary>
/// Initializes a new instance of the <see cref="MonitorIdentifier"/> struct.
/// </summary>
/// <param name="metricIdentifier">The metric identifier.</param>
/// <param name="monitorId">The monitor identifier.</param>
[JsonConstructor]
public MonitorIdentifier(MetricIdentifier metricIdentifier, string monitorId)
{
if (monitorId == null)
{
throw new ArgumentNullException("monitorId");
}
this.metricIdentifier = metricIdentifier;
this.monitorId = monitorId;
}
/// <summary>
/// Gets the metric identifier.
/// </summary>
public MetricIdentifier MetricIdentifier
{
get
{
return this.metricIdentifier;
}
}
/// <summary>
/// Gets the monitor ID as in the monitor configuration.
/// </summary>
public string MonitorId
{
get
{
return this.monitorId;
}
}
/// <summary>
/// Indicates whether the current object is equal to another object of the same type.
/// </summary>
/// <param name="other">An object to compare with this object.</param>
/// <returns>
/// true if the current object is equal to the <paramref name="other" /> parameter; otherwise, false.
/// </returns>
public bool Equals(MonitorIdentifier other)
{
return this.metricIdentifier.Equals(other.metricIdentifier)
&& string.Equals(this.monitorId, other.monitorId, StringComparison.OrdinalIgnoreCase);
}
/// <summary>
/// Determines whether the specified <paramref name="obj"/> is equal to this instance.
/// </summary>
/// <param name="obj">The <see cref="object" /> to compare with this instance.</param>
/// <returns>
/// <c>true</c> if the specified <see cref="object" /> is equal to this instance; otherwise, <c>false</c>.
/// </returns>
public override bool Equals(object obj)
{
if (ReferenceEquals(null, obj))
{
return false;
}
return obj is MonitorIdentifier && this.Equals((MonitorIdentifier)obj);
}
/// <summary>
/// Returns a hash code for this instance.
/// </summary>
/// <returns>
/// A hash code for this instance, suitable for use in hashing algorithms and data structures like a hash table.
/// </returns>
public override int GetHashCode()
{
unchecked
{
return (this.metricIdentifier.GetHashCode() * 397)
^ (this.monitorId != null ? StringComparer.OrdinalIgnoreCase.GetHashCode(this.monitorId) : 0);
}
}
/// <summary>
/// Validates this instance.
/// </summary>
internal void Validate()
{
// ReSharper disable once ImpureMethodCallOnReadonlyValueField
this.metricIdentifier.Validate();
if (string.IsNullOrWhiteSpace(this.monitorId))
{
throw new ArgumentException("monitorId is null or empty.");
}
}
}
}<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="HyperLogLogSketches.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Online.Metrics.Serialization
{
using System;
using System.Collections.Generic;
/// <summary>
/// Represents a list of distinct count dimension names and the HyperLogLogSketch associated with each.
/// </summary>
public class HyperLogLogSketches : List<KeyValuePair<string, HyperLogLogSketch>>, IReadOnlyHyperLogLogSketches
{
/// <summary>
/// Gets the number of distinct count dimension sketches in the list.
/// </summary>
public uint HyperLogLogSketchesCount
{
get
{
return (uint)this.Count;
}
}
/// <summary>
/// Gets the sketches enumeration required by the <see cref="IReadOnlyHyperLogLogSketches"/> interface.
/// </summary>
IEnumerable<KeyValuePair<string, HyperLogLogSketch>> IReadOnlyHyperLogLogSketches.HyperLogLogSketches
{
get
{
return this;
}
}
/// <summary>
/// Gets or sets the sketch for the given distinct count dimension name.
/// </summary>
/// <param name="key">Distinct count dimension name.</param>
/// <returns>The <see cref="HyperLogLogSketch"/> instance for the given dimension name.</returns>
public HyperLogLogSketch this[string key]
{
get
{
var index = this.Find(key);
if (index < 0)
{
throw new KeyNotFoundException(string.Format("{0} not found in sketches list", key));
}
return this[index].Value;
}
set
{
this.Add(key, value);
}
}
/// <summary>
/// Checks if given distinct count dimension exists.
/// </summary>
/// <param name="distinctCountDimensionName">Distinct count dimension name.</param>
/// <returns>True if exists else false.</returns>
public bool ContainsKey(string distinctCountDimensionName)
{
return this.Find(distinctCountDimensionName) >= 0;
}
/// <summary>
/// Adds a key/value pair to <see cref="HyperLogLogSketches"/> by using the specified function, if the key does not already exist.
/// </summary>
/// <param name="key">Key of the element.</param>
/// <param name="valueFactory">The function used to generate <see cref="HyperLogLogSketch"/>.</param>
/// <returns><see cref="HyperLogLogSketch"/> for specified key. This will be either the existing value or the new value returned by valueFactory (if specified key is not found).</returns>
public HyperLogLogSketch GetOrAdd(string key, Func<HyperLogLogSketch> valueFactory)
{
if (key == null)
{
throw new ArgumentNullException(nameof(key));
}
if (valueFactory == null)
{
throw new ArgumentNullException(nameof(valueFactory));
}
var index = this.Find(key);
HyperLogLogSketch hyperLogLogSketch;
if (index >= 0)
{
hyperLogLogSketch = this[index].Value;
}
else
{
hyperLogLogSketch = valueFactory();
this.Add(new KeyValuePair<string, HyperLogLogSketch>(key, hyperLogLogSketch));
}
return hyperLogLogSketch;
}
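// Illustrative sketch (not part of the original source): GetOrAdd creates a sketch for a
// distinct-count dimension only on first use and returns the existing one afterwards
// (dimension names are matched case-insensitively).
//
//   var sketches = new HyperLogLogSketches();
//   var s1 = sketches.GetOrAdd("UserId", () => new HyperLogLogSketch(HyperLogLogSketch.DefaultBValue));
//   var s2 = sketches.GetOrAdd("userid", () => new HyperLogLogSketch(HyperLogLogSketch.DefaultBValue));
//   // ReferenceEquals(s1, s2) == true; sketches.HyperLogLogSketchesCount == 1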
/// <summary>
/// Adds the given distinct count dimension and sketch to the list.
/// </summary>
/// <param name="distinctCountDimensionName">Distinct count dimension name.</param>
/// <param name="sketch">Sketch data.</param>
public void Add(string distinctCountDimensionName, HyperLogLogSketch sketch)
{
var index = this.Find(distinctCountDimensionName);
if (index >= 0)
{
this[index] = new KeyValuePair<string, HyperLogLogSketch>(distinctCountDimensionName, sketch);
}
else
{
this.Add(new KeyValuePair<string, HyperLogLogSketch>(distinctCountDimensionName, sketch));
}
}
private int Find(string key)
{
int index = -1;
for (int i = 0; i < this.Count; i++)
{
if (this[i].Key.Equals(key, System.StringComparison.OrdinalIgnoreCase))
{
index = i;
break;
}
}
return index;
}
}
}
<file_sep>// -----------------------------------------------------------------------
// <copyright file="FrontEndMetricDeserializer.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// -----------------------------------------------------------------------
namespace Microsoft.Online.Metrics.Serialization
{
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Text;
/// <summary>
/// Class which deserializes Autopilot count (metric) data.
/// Serialization format corresponds to one described in http://sharepoint/sites/autopilot/team/Docs/Silicon/Monitroing%20Team/AD%20Metrics%20%20Autopilot%20Counter%20Data%20Common%20Serialization%20Format.docx.
/// </summary>
/// <typeparam name="TMetadata">Type of metadata object used for deserialization.</typeparam>
public sealed class FrontEndMetricDeserializer<TMetadata>
where TMetadata : IMetricMetadata
{
/// <summary>
/// TDigest format prefix value
/// </summary>
public const uint TDigestPrefixValue = 0x74; // == 't' "tDigest"
private const ushort MinVersion = 3;
private const ushort MaxVersion = 6;
private const uint TypeSerializerFlags = 0x12020000; // Corresponds to 0001.001.0000.0001.00000000000000000 (Use string and metadata interning with variable-length integer serialization)
private readonly List<string> stringsDictionary = new List<string>();
private readonly List<TMetadata> metadataDictionary = new List<TMetadata>();
private readonly List<KeyValuePair<ulong, uint>> histogramBuffer = new List<KeyValuePair<ulong, uint>>(2000);
private readonly List<string> reusableStringsList = new List<string>();
/// <summary>
/// Validates the data packet by CRC check
/// </summary>
/// <param name="dataPacket">Data packet to check.</param>
/// <exception cref="CrcCheckFailedSerializationException">
/// Throws when CRC check fails.
/// </exception>
public static void ValidateCrc(byte[] dataPacket)
{
var version = (ushort)(dataPacket[0] | dataPacket[1] << 8);
if (version < 5)
{
// No CRC is added for versions less than 5.
return;
}
var crc = (uint)(dataPacket[2] | dataPacket[3] << 8 | dataPacket[4] << 16 | dataPacket[5] << 24);
var computedCrc = Crc.ComputeCrc(0, dataPacket, 6);
if (crc != computedCrc)
{
throw new CrcCheckFailedSerializationException($"Crc check failed. Computed CRC: {computedCrc}, Packet CRC: {crc}");
}
}
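// Illustrative sketch (not part of the original source): the packet header layout that
// ValidateCrc relies on for version >= 5. Bytes 0-1 hold the little-endian version, bytes 2-5
// hold the CRC, and the ComputeCrc call above suggests the check covers the packet from offset 6
// onwards. MyMetadata stands for any IMetricMetadata implementation; ReceivePacket is hypothetical.
//
//   byte[] packet = ReceivePacket();
//   FrontEndMetricDeserializer<MyMetadata>.ValidateCrc(packet);   // throws CrcCheckFailedSerializationException on mismatch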
/// <summary>
/// Deserializes counter (metric) data from the stream and adds all objects to provided collection.
/// </summary>
/// <param name="stream">Stream from which data should be deserialized. Stream should be readable and provide randon access.</param>
/// <param name="metricBuilder">An object responsible for creation and further consumption of deserialized data.</param>
/// <param name="maxMetricStringsLength">Maximum length of strings, which represent metric name, dimension names and values.</param>
/// <param name="maxMetricNamespaceStringsLength">Maximum length of metric namespace string.</param>
/// <param name="maxMetricDimensionValueStringsLength">Maximum length of metric dimension value string.</param>
public void Deserialize(
Stream stream,
IFrontEndMetricBuilder<TMetadata> metricBuilder,
int maxMetricStringsLength,
int maxMetricNamespaceStringsLength,
int maxMetricDimensionValueStringsLength)
{
if (!stream.CanRead || !stream.CanSeek)
{
throw new ArgumentException(@"Stream should be readable and provide random access.", nameof(stream));
}
try
{
using (var reader = new BinaryReader(stream, Encoding.UTF8, true))
{
var startStreamPosition = stream.Position;
// Read version and type serializers info
var version = reader.ReadUInt16();
if (version < MinVersion || version > MaxVersion)
{
throw new VersionNotSupportedMetricSerializationException(
string.Format(
CultureInfo.InvariantCulture,
"Version is not supported. ReadVersion:{0}, MinVersion:{1}, MaxVersion:{2}.",
version,
MinVersion,
MaxVersion));
}
if (version >= 5)
{
// Read CRC. CRC check is done in upper layers.
reader.ReadUInt32();
}
if (reader.ReadUInt32() != TypeSerializerFlags)
{
throw new VersionNotSupportedMetricSerializationException("Type serializers not supported.");
}
metricBuilder.SetSerializationVersion(version);
// Read strings
var deserializerDataPosition = stream.Position;
stream.Position += sizeof(long);
stream.Position = startStreamPosition + reader.ReadInt64();
var count = SerializationUtils.ReadUInt32FromBase128(reader);
for (uint i = 0; i < count; ++i)
{
this.stringsDictionary.Add(metricBuilder.GetString(reader.ReadString()));
}
var endOfPacketStreamPosition = stream.Position;
// Read metrics metadata
stream.Position = deserializerDataPosition;
stream.Position = startStreamPosition + reader.ReadInt64();
count = SerializationUtils.ReadUInt32FromBase128(reader);
for (uint i = 0; i < count; ++i)
{
this.metadataDictionary.Add(this.ReadMetricMetadata(reader, metricBuilder, maxMetricStringsLength, maxMetricNamespaceStringsLength));
}
// Read metrics data
stream.Position = deserializerDataPosition + (2 * sizeof(long));
this.ReadMetricsData(reader, metricBuilder, version, maxMetricDimensionValueStringsLength);
// Bring back the stream to total read data position
stream.Position = endOfPacketStreamPosition;
}
}
catch (IOException ioException)
{
throw new MetricSerializationException("Failed to deserialize data. Problem with input stream.", ioException);
}
catch (Exception exception)
{
var serializationException = exception as MetricSerializationException;
bool isInvalidData = false;
if (serializationException != null)
{
isInvalidData = serializationException.IsInvalidData;
}
throw new MetricSerializationException("Failed to deserialize data. Likely the incoming stream contains corrupted data.", exception, isInvalidData);
}
finally
{
this.metadataDictionary.Clear();
this.stringsDictionary.Clear();
this.histogramBuffer.Clear();
this.reusableStringsList.Clear();
}
}
/// <summary>
/// Read metrics count and advances the stream.
/// </summary>
/// <param name="stream">Stream of incoming events data for one packet.</param>
/// <returns>Number of metrics in packet.</returns>
public int ReadMetricsCount(Stream stream)
{
using (var reader = new BinaryReader(stream, Encoding.UTF8))
{
var version = reader.ReadUInt16();
if (version >= 5)
{
reader.ReadUInt32(); // for CRC
}
if (reader.ReadUInt32() != TypeSerializerFlags)
{
throw new VersionNotSupportedMetricSerializationException("Type serializers not supported.");
}
stream.Position += 2 * sizeof(long); // skipping address of strings and address of metadata
if (version >= 5)
{
SerializationUtils.ReadInt64FromBase128(reader); // for packet time
}
return reader.ReadInt32();
}
}
private void ReadMetricsData(
BinaryReader reader,
IFrontEndMetricBuilder<TMetadata> metricBuilder,
ushort version,
int maxMetricDimensionValueStringsLength)
{
long packetTime = 0;
if (version >= 5)
{
packetTime = (long)SerializationUtils.ReadUInt64FromBase128(reader);
}
Stream readerStream = reader.BaseStream;
var metricsCount = reader.ReadUInt32();
for (var i = 0; i < metricsCount; ++i)
{
DateTime timeUtc;
var count = 0U;
var sum = default(MetricValueV2);
var min = default(MetricValueV2);
var max = default(MetricValueV2);
double sumOfSquareDiffFromMean = 0;
var metadata = this.ReadMetricMetadataByIndex(reader);
if (version >= 5)
{
var timeInTicks = (packetTime - SerializationUtils.ReadInt64FromBase128(reader)) * SerializationUtils.OneMinuteInterval;
timeUtc = new DateTime(timeInTicks, DateTimeKind.Utc);
}
else
{
timeUtc = new DateTime((long)SerializationUtils.ReadUInt64FromBase128(reader), DateTimeKind.Utc);
}
for (var j = 0; j < metadata.DimensionsCount; ++j)
{
var dimensionValue = this.ReadStringByIndex(reader);
if (dimensionValue.Length > maxMetricDimensionValueStringsLength)
{
throw new MetricSerializationException($"Dimension value string in the packet exceeds preconfigured length. Packet is corrupted. MaxLength:{maxMetricDimensionValueStringsLength}, Value:{dimensionValue}.", null, true);
}
this.reusableStringsList.Add(dimensionValue);
}
var samplingTypes = (SamplingTypes)SerializationUtils.ReadUInt32FromBase128(reader);
var isDouble = (samplingTypes & SamplingTypes.DoubleValueType) != 0;
var isDoubleStoredAslong = (samplingTypes & SamplingTypes.DoubleValueStoredAsLongType) != 0;
if ((samplingTypes & SamplingTypes.Min) != 0)
{
min = this.ReadMetricValue(reader, isDouble, isDoubleStoredAslong);
}
if ((samplingTypes & SamplingTypes.Max) != 0)
{
max = this.ReadMetricValue(reader, isDouble, isDoubleStoredAslong);
}
if ((samplingTypes & SamplingTypes.Sum) != 0)
{
sum = this.ReadMetricValue(reader, isDouble, isDoubleStoredAslong);
}
if ((samplingTypes & SamplingTypes.Count) != 0)
{
count = SerializationUtils.ReadUInt32FromBase128(reader);
}
if ((samplingTypes & SamplingTypes.SumOfSquareDiffFromMean) != 0)
{
sumOfSquareDiffFromMean = reader.ReadDouble();
}
bool haveHistogram = (samplingTypes & SamplingTypes.Histogram) != 0;
metricBuilder.BeginMetricCreation(metadata, this.reusableStringsList, timeUtc, samplingTypes, count, sum, min, max, sumOfSquareDiffFromMean);
this.reusableStringsList.Clear();
if (haveHistogram)
{
IEnumerable<KeyValuePair<ulong, uint>> histogramBuckets =
SerializationUtils.ReadHistogram(reader, hasHistogramSizePrefix: version > 3);
this.histogramBuffer.Clear();
this.histogramBuffer.AddRange(histogramBuckets);
metricBuilder.AssignHistogram(this.histogramBuffer);
}
if ((samplingTypes & SamplingTypes.HyperLogLogSketch) != 0)
{
var sizeOfHyperLogLogSketches = reader.ReadInt32();
metricBuilder.AssignHyperLogLogSketch(reader, sizeOfHyperLogLogSketches);
}
if (version >= 6)
{
bool haveTDigest = (samplingTypes & SamplingTypes.TDigest) != 0;
bool readTDigest = false;
// Starting from version 6, there is a list of TLV-type
// (https://en.wikipedia.org/wiki/Type-length-value) tuples in the rest of the serialized metric.
// Deserialize all of them, ignoring the unknown ones.
// The TLV list is expected to end with a single end-of-list marker with T = 0x00.
uint type;
while ((type = SerializationUtils.ReadUInt32FromBase128(reader)) != 0x00)
{
int length = (int)SerializationUtils.ReadUInt32FromBase128(reader);
long nextPos = readerStream.Position + length;
switch (type)
{
case TDigestPrefixValue: // 0x74 == 't' (tDigest)
if (haveTDigest)
{
if (!readTDigest)
{
metricBuilder.AssignTDigest(reader, length);
readTDigest = true;
}
else
{
// if we already saw a tDigest value and see it a
// second time, it is a sign of a protocol error.
throw new MetricSerializationException("Saw 2 TDigest values for the same metric", null, isInvalidData: true);
}
}
else
{
// if TLV list contains tDigest but the sampling types does not
// it is a protocol error
throw new MetricSerializationException("Sampling types do not contain tDigest, but TLV list contains it", null, isInvalidData: true);
}
break;
default:
// ignore unknown types
break;
}
// Always set the position to point to the next entry; do not trust the
// deserializer code to leave the position set correctly. This helps prevent
// compatibility problems and makes the deserializer more stable.
readerStream.Position = nextPos;
}
if (haveTDigest && !readTDigest)
{
// if sampling types contain TDigest but we have not seen it in TLV this is
// a sign of protocol error
throw new MetricSerializationException("Sampling types contain tDigest, but TLV list does not contain it", null, isInvalidData: true);
}
}
metricBuilder.EndMetricCreation();
}
}
private string ReadStringByIndex(BinaryReader reader)
{
var index = (int)SerializationUtils.ReadUInt32FromBase128(reader);
return this.stringsDictionary[index];
}
private TMetadata ReadMetricMetadataByIndex(BinaryReader reader)
{
var index = (int)SerializationUtils.ReadUInt32FromBase128(reader);
return this.metadataDictionary[index];
}
private TMetadata ReadMetricMetadata(BinaryReader reader, IFrontEndMetricBuilder<TMetadata> metricBuilder, int maxMetricStringsLength, int maxMetricNamespaceStringsLength)
{
var metricNamespace = this.ReadStringByIndex(reader);
if (metricNamespace.Length > maxMetricNamespaceStringsLength)
{
throw new MetricSerializationException(
$"Namespace string in the packet exceeds preconfigured length. Packet is corrupted. MaxLength:{maxMetricNamespaceStringsLength}, Value:{metricNamespace}.", null, true);
}
var metricName = this.ReadStringByIndex(reader);
if (metricName.Length > maxMetricStringsLength)
{
throw new MetricSerializationException(
$"Metric name string in the packet exceeds preconfigured length. Packet is corrupted. MaxLength:{maxMetricStringsLength}, Value:{metricName}.", null, true);
}
var count = SerializationUtils.ReadUInt32FromBase128(reader);
for (var i = 0; i < count; ++i)
{
var dimensionName = this.ReadStringByIndex(reader);
if (dimensionName.Length > maxMetricStringsLength)
{
throw new MetricSerializationException(
$"Dimension name string in the packet exceeds preconfigured length. Packet is corrupted. MaxLength:{maxMetricStringsLength}, Value:{dimensionName}.", null, true);
}
this.reusableStringsList.Add(dimensionName);
}
var result = metricBuilder.CreateMetadata(metricNamespace, metricName, this.reusableStringsList);
this.reusableStringsList.Clear();
return result;
}
private MetricValueV2 ReadMetricValue(BinaryReader reader, bool isDouble, bool isDoubleStoredAsLong)
{
if (isDouble)
{
if (isDoubleStoredAsLong)
{
return new MetricValueV2 { ValueAsDouble = SerializationUtils.ReadInt64FromBase128(reader) };
}
else
{
return new MetricValueV2 { ValueAsDouble = reader.ReadDouble() };
}
}
else
{
return new MetricValueV2 { ValueAsULong = SerializationUtils.ReadUInt64FromBase128(reader) };
}
}
}
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="IMonitoringAccount.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Configuration
{
using System;
using System.Collections.Generic;
/// <summary>
/// This object represents one MDM Monitoring Account, which is used to grant permission
/// to users, certificates, and groups.
/// </summary>
public interface IMonitoringAccount
{
/// <summary>
/// The name of the monitoring account.
/// </summary>
string Name { get; }
/// <summary>
/// The display name of the monitoring account.
/// </summary>
string DisplayName { get; set; }
/// <summary>
/// The description of the monitoring account.
/// </summary>
string Description { get; set; }
/// <summary>
/// The host name of the MDM stamp that currently owns this account.
/// </summary>
string HomeStampHostName { get; }
/// <summary>
/// The list of entities that have access to this MDM account and their roles.
/// </summary>
IEnumerable<IPermissionV2> Permissions { get; }
/// <summary>
/// The time the account was last updated.
/// </summary>
DateTime LastUpdatedTimeUtc { get; }
/// <summary>
/// The identity that updated the account most recently.
/// </summary>
string LastUpdatedBy { get; }
/// <summary>
/// The version of the monitoring account configuration.
/// </summary>
uint Version { get; }
/// <summary>
/// Gets the list of mirror monitoring accounts.
/// </summary>
IEnumerable<string> MirrorMonitoringAccountList { get; }
/// <summary>
/// Adds a permission to the account.
/// </summary>
/// <param name="permission">Permission to add.</param>
void AddPermission(IPermissionV2 permission);
/// <summary>
/// Remove permission from the account.
/// </summary>
/// <param name="permission">Permission to remove.</param>
void RemovePermission(IPermissionV2 permission);
/// <summary>
/// Adds a monitoring account to the mirror monitoring account list.
/// </summary>
/// <param name="monitoringAccountName">Name of the monitoring account.</param>
void AddMirrorMonitoringAccount(string monitoringAccountName);
/// <summary>
/// Removes a monitoring account from the mirror monitoring account list.
/// </summary>
/// <param name="monitoringAccountName">Name of the monitoring account.</param>
void RemoveMirrorMonitoringAccount(string monitoringAccountName);
}
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="MetricDefinitionV2.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client
{
using System;
using System.Collections.Generic;
/// <summary>
/// Represents a metric definition.
/// A metric definition uniquely identifies a metric name and the set of dimensions emitted for this metric by a client.
/// </summary>
public sealed class MetricDefinitionV2 : IEquatable<MetricDefinitionV2>
{
private int hashCode;
/// <summary>
/// Initializes a new instance of the <see cref="MetricDefinitionV2"/> class.
/// </summary>
/// <param name="monitoringAccount">Metric monitoring account.</param>
/// <param name="metricNamespace">Metric namespace name.</param>
/// <param name="metricName">Metric name.</param>
/// <param name="dimensionNames">Dimension names enumeration for the metric definition.</param>
public MetricDefinitionV2(string monitoringAccount, string metricNamespace, string metricName, IEnumerable<string> dimensionNames)
{
this.MonitoringAccount = monitoringAccount;
this.MetricNamespace = metricNamespace;
this.MetricName = metricName;
this.DimensionNames = new SortedSet<string>(StringComparer.OrdinalIgnoreCase);
foreach (var dimensionName in dimensionNames)
{
this.DimensionNames.Add(dimensionName);
}
this.hashCode = 524287;
var temp = StringComparer.OrdinalIgnoreCase.GetHashCode(monitoringAccount);
this.hashCode = (int)(((uint)this.hashCode << temp) | ((uint)this.hashCode >> (32 - temp))) ^ temp;
this.hashCode = (int)(((uint)this.hashCode << temp) | ((uint)this.hashCode >> (32 - temp)));
temp = StringComparer.OrdinalIgnoreCase.GetHashCode(metricNamespace);
this.hashCode = (int)(((uint)this.hashCode << temp) | ((uint)this.hashCode >> (32 - temp))) ^ temp;
this.hashCode = (int)(((uint)this.hashCode << temp) | ((uint)this.hashCode >> (32 - temp)));
temp = StringComparer.OrdinalIgnoreCase.GetHashCode(metricName);
this.hashCode = (int)(((uint)this.hashCode << temp) | ((uint)this.hashCode >> (32 - temp))) ^ temp;
this.hashCode = (int)(((uint)this.hashCode << temp) | ((uint)this.hashCode >> (32 - temp)));
foreach (var currentDimensionValue in this.DimensionNames)
{
if (string.IsNullOrWhiteSpace(currentDimensionValue))
{
throw new ArgumentException("Dimension names cannot be null or empty strings.", nameof(dimensionNames));
}
temp = StringComparer.OrdinalIgnoreCase.GetHashCode(currentDimensionValue);
this.hashCode = (int)(((uint)this.hashCode << temp) | ((uint)this.hashCode >> (32 - temp))) ^ temp;
this.hashCode = (int)(((uint)this.hashCode << temp) | ((uint)this.hashCode >> (32 - temp)));
}
}
/// <summary>
/// The name of the monitoring account the metric belongs to.
/// </summary>
public string MonitoringAccount { get; private set; }
/// <summary>
/// The name of the namespace the metric belongs to.
/// </summary>
public string MetricNamespace { get; private set; }
/// <summary>
/// The metric name for current metric definition.
/// </summary>
public string MetricName { get; private set; }
/// <summary>
/// List of dimensions emitted for this metric definition instance.
/// </summary>
public SortedSet<string> DimensionNames { get; private set; }
/// <inheritdoc/>
public override int GetHashCode()
{
return this.hashCode;
}
/// <inheritdoc/>
public override bool Equals(object obj)
{
return this.Equals(obj as MetricDefinitionV2);
}
/// <inheritdoc/>
public bool Equals(MetricDefinitionV2 other)
{
if (other == null)
{
return false;
}
if (ReferenceEquals(this, other))
{
return true;
}
if (this.hashCode != other.hashCode ||
this.DimensionNames.Count != other.DimensionNames.Count ||
!this.MonitoringAccount.Equals(other.MonitoringAccount, StringComparison.OrdinalIgnoreCase) ||
!this.MetricNamespace.Equals(other.MetricNamespace, StringComparison.OrdinalIgnoreCase) ||
!this.MetricName.Equals(other.MetricName, StringComparison.OrdinalIgnoreCase))
{
return false;
}
return this.DimensionNames.SetEquals(other.DimensionNames);
}
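// Illustrative sketch (not part of the original source): dimension names are stored in a
// case-insensitive sorted set, so their order and casing do not affect equality.
//
//   var a = new MetricDefinitionV2("Account", "Ns", "Latency", new[] { "Region", "Role" });
//   var b = new MetricDefinitionV2("Account", "Ns", "Latency", new[] { "role", "REGION" });
//   // a.Equals(b) == true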
}
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="DimensionConfiguration.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Online.Metrics.Serialization.Configuration
{
using System;
using System.Collections.Generic;
using Newtonsoft.Json;
/// <summary>
/// Represents the configuration of a dimension.
/// </summary>
public sealed class DimensionConfiguration
{
/// <summary>
/// Initializes a new instance of the <see cref="DimensionConfiguration"/> class.
/// </summary>
/// <param name="id">The dimension name.</param>
public DimensionConfiguration(string id)
: this(id, null)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="DimensionConfiguration"/> class.
/// </summary>
/// <param name="id">The dimension name.</param>
/// <param name="dimensionValuesToIgnore">The dimension values to ignore.</param>
[JsonConstructor]
public DimensionConfiguration(string id, IList<string> dimensionValuesToIgnore)
{
if (string.IsNullOrEmpty(id))
{
throw new ArgumentException("id is null or empty.");
}
this.Id = id;
this.DimensionValuesToIgnore = dimensionValuesToIgnore;
}
/// <summary>
/// Gets the identifier of this instance.
/// </summary>
public string Id { get; private set; }
/// <summary>
/// Gets or sets the dimension values to ignore.
/// </summary>
[JsonProperty(DefaultValueHandling = DefaultValueHandling.Ignore)]
public IList<string> DimensionValuesToIgnore { get; set; }
/// <summary>
/// Gets or sets the identifier.
/// </summary>
[JsonProperty(DefaultValueHandling = DefaultValueHandling.Ignore)]
public long Identifier { get; set; }
/// <summary>
/// Validates the current instance and throws a <see cref="ArgumentException"/> if the instance is invalid.
/// </summary>
public void Validate()
{
if (string.IsNullOrEmpty(this.Id))
{
throw new ArgumentException("Property 'Id' is null or empty.");
}
}
}
}<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="Certificate.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Configuration
{
using System;
using Newtonsoft.Json;
/// <summary>
/// Certificate with access to MDM.
/// </summary>
[Obsolete]
public sealed class Certificate : IPermission
{
/// <summary>
/// Initializes a new instance of the <see cref="Certificate"/> class.
/// Create a new certificate for MDM access.
/// </summary>
/// <param name="identity">The thumbprint of the certificate in hexidecimal form.</param>
/// <param name="role">The role granted to this certificate.</param>
[JsonConstructor]
public Certificate(string identity, Role role)
{
if (string.IsNullOrWhiteSpace(identity))
{
throw new ArgumentNullException(nameof(identity));
}
this.Identity = identity;
this.Role = role;
}
/// <summary>
/// The identity to grant permission.
/// </summary>
public string Identity { get; }
/// <summary>
/// The level of access to be granted to this identity.
/// </summary>
public Role Role { get; set; }
}
}
<file_sep>using System.Collections.Generic;
namespace AzSignalR.Monitor.JobRegistry
{
public class SignalRResourceMeta
{
public string Subscription { get; set; }
public string ResourceGroup { get; set; }
public string ResourceName { get; set; }
public string KubeId { get; set; }
public string RowGuid { get; set; }
public static SignalRResourceMeta FromDict(IDictionary<string, string> labels)
{
labels.TryGetValue("subscription", out var subscription);
labels.TryGetValue("resourceGroup", out var resourceGroup);
labels.TryGetValue("resourceName", out var resourceName);
labels.TryGetValue("resourceKubeId", out var kubeId);
labels.TryGetValue("resourceRowGuid", out var rowGuid);
if (string.IsNullOrEmpty(subscription)
&& string.IsNullOrEmpty(resourceGroup)
&& string.IsNullOrEmpty(resourceName)
&& string.IsNullOrEmpty(kubeId)
&& string.IsNullOrEmpty(rowGuid))
{
return null;
}
var meta = new SignalRResourceMeta
{
Subscription = subscription,
ResourceGroup = resourceGroup,
ResourceName = resourceName,
KubeId = kubeId,
RowGuid = rowGuid,
};
return meta;
}
}
}
<file_sep>// <copyright file="PreaggregateFilters.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// ReSharper disable once CheckNamespace
namespace Microsoft.Cloud.Metrics.Client.PreaggregateFiltersManagement
{
using System;
using System.Collections.Generic;
using Metrics;
using Newtonsoft.Json;
/// <summary>
/// Represents a set of filters for a single pre-aggregate.
/// </summary>
[JsonObject]
public sealed class PreaggregateFilters
{
/// <summary>
/// Initializes a new instance of the <see cref="PreaggregateFilters"/> class.
/// </summary>
/// <param name="monitoringAccount">The monitoring account.</param>
/// <param name="metricNamespace">The metric namespace.</param>
/// <param name="metricName">Name of the metric.</param>
/// <param name="preaggregateDimensionNames">The preaggregate dimension names.</param>
/// <param name="filterValues">The filter values.</param>
public PreaggregateFilters(
string monitoringAccount,
string metricNamespace,
string metricName,
IEnumerable<string> preaggregateDimensionNames,
IReadOnlyList<DimensionFilter> filterValues)
{
if (string.IsNullOrEmpty(monitoringAccount))
{
throw new ArgumentNullException(nameof(monitoringAccount));
}
if (string.IsNullOrEmpty(metricNamespace))
{
throw new ArgumentNullException(nameof(metricNamespace));
}
if (string.IsNullOrEmpty(metricName))
{
throw new ArgumentNullException(nameof(metricName));
}
if (preaggregateDimensionNames == null)
{
throw new ArgumentNullException(nameof(preaggregateDimensionNames));
}
if (filterValues == null)
{
throw new ArgumentNullException(nameof(filterValues));
}
if (filterValues.Count == 0)
{
throw new ArgumentException($"{nameof(filterValues)} cannot be empty");
}
this.MonitoringAccount = monitoringAccount;
this.MetricNamespace = metricNamespace;
this.MetricName = metricName;
var preaggregateDimensionNamesSet = new SortedSet<string>(preaggregateDimensionNames, StringComparer.OrdinalIgnoreCase);
if (preaggregateDimensionNamesSet.Count == 0)
{
throw new ArgumentException($"{nameof(preaggregateDimensionNames)} cannot be empty");
}
this.PreaggregateDimensionNames = preaggregateDimensionNamesSet;
foreach (string dim in this.PreaggregateDimensionNames)
{
if (string.IsNullOrWhiteSpace(dim))
{
throw new ArgumentException($"{nameof(preaggregateDimensionNames)} cannot have empty or null values");
}
}
this.DimensionFilters = filterValues;
var serverRepresentationOfFilterValues = new List<PreaggregateDimensionFilterValues>();
foreach (DimensionFilter filterValue in filterValues)
{
if (filterValue.IsExcludeFilter)
{
throw new ArgumentException($"{nameof(filterValues)} are not allowed to have exclude filters. Dimension Name with exclude filters:{filterValue.DimensionName}");
}
serverRepresentationOfFilterValues.Add(new PreaggregateDimensionFilterValues(filterValue.DimensionName, filterValue.DimensionValues));
}
this.FilterValues = serverRepresentationOfFilterValues;
}
/// <summary>
/// Initializes a new instance of the <see cref="PreaggregateFilters"/> class.
/// </summary>
/// <param name="monitoringAccount">The monitoring account.</param>
/// <param name="metricNamespace">The metric namespace.</param>
/// <param name="metricName">Name of the metric.</param>
/// <param name="preaggregateDimensionNames">The preaggregate dimension names.</param>
/// <param name="filterValues">The filter values.</param>
[JsonConstructor]
internal PreaggregateFilters(
string monitoringAccount,
string metricNamespace,
string metricName,
IEnumerable<string> preaggregateDimensionNames,
IReadOnlyList<PreaggregateDimensionFilterValues> filterValues)
{
this.MonitoringAccount = monitoringAccount;
this.MetricNamespace = metricNamespace;
this.MetricName = metricName;
var preaggregateDimensionNamesSet = new SortedSet<string>(preaggregateDimensionNames, StringComparer.OrdinalIgnoreCase);
this.PreaggregateDimensionNames = preaggregateDimensionNamesSet;
this.FilterValues = filterValues;
var dimensionFilters = new List<DimensionFilter>(this.FilterValues.Count);
foreach (PreaggregateDimensionFilterValues filter in this.FilterValues)
{
dimensionFilters.Add(DimensionFilter.CreateIncludeFilter(filter.FilterDimensionName, filter.FilterValues));
}
this.DimensionFilters = dimensionFilters;
}
/// <summary>
/// Gets the monitoring account.
/// </summary>
public string MonitoringAccount { get; }
/// <summary>
/// Gets the metric namespace.
/// </summary>
public string MetricNamespace { get; }
/// <summary>
/// Gets the name of the metric.
/// </summary>
public string MetricName { get; }
/// <summary>
/// Gets the preaggregate dimension names.
/// </summary>
public IEnumerable<string> PreaggregateDimensionNames { get; }
/// <summary>
/// Gets the filter values.
/// </summary>
[JsonIgnore]
public IReadOnlyList<DimensionFilter> DimensionFilters { get; }
/// <summary>
/// Gets the filter values representation of server side.
/// </summary>
[JsonProperty]
internal IReadOnlyList<PreaggregateDimensionFilterValues> FilterValues { get; }
}
}<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="QueryResultListV3.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Query
{
using System;
using System.Collections.Generic;
using Newtonsoft.Json;
/// <summary>
/// Holds the list of query results and the associated metadata.
/// </summary>
/// <seealso cref="IQueryResultListV3" />
public sealed class QueryResultListV3 : IQueryResultListV3
{
/// <summary>
/// Initializes a new instance of the <see cref="QueryResultListV3"/> class.
/// </summary>
/// <param name="startTimeUtc">The start time.</param>
/// <param name="endTimeUtc">The end time.</param>
/// <param name="timeResolutionInMinutes">The time resolution in milliseconds.</param>
/// <param name="results">The result time series.</param>
[JsonConstructor]
internal QueryResultListV3(DateTime startTimeUtc, DateTime endTimeUtc, int timeResolutionInMinutes, IReadOnlyList<IQueryResultV3> results)
{
this.StartTimeUtc = startTimeUtc;
this.EndTimeUtc = endTimeUtc;
this.TimeResolutionInMinutes = timeResolutionInMinutes;
this.Results = results;
}
/// <summary>
/// Gets the end time in UTC for the query results.
/// </summary>
public DateTime EndTimeUtc { get; }
/// <summary>
/// Gets the start time in UTC for the query results.
/// </summary>
public DateTime StartTimeUtc { get; }
/// <summary>
/// Gets the time resolution in minutes for the query results.
/// </summary>
public int TimeResolutionInMinutes { get; }
/// <summary>
/// Gets the query results. Each result represents a single time series whose start time, end time, and time resolution
/// are represented by this object's members.
/// </summary>
public IReadOnlyList<IQueryResultV3> Results { get; }
}
}
<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="MetricNamespaceConfiguration.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Online.Metrics.Serialization.Configuration
{
using System;
using System.Collections.Generic;
using Newtonsoft.Json;
/// <summary>
/// The data structure that the batch API for fetching metric configuration returns per namespace
/// in the new (Monitoring Agent) pipeline. This is JSON serialized to the clients.
/// </summary>
public sealed class MetricNamespaceConfiguration
{
private DateTime updatedAtUtc;
/// <summary>
/// Initializes a new instance of the <see cref="MetricNamespaceConfiguration"/> class.
/// </summary>
public MetricNamespaceConfiguration()
{
this.MetricConfigurations = new HashSet<MetricConfigurationV2>();
}
/// <summary>
/// Gets or sets the metric namespace.
/// </summary>
[JsonProperty(PropertyName = "m")]
public string MetricNamespace { get; set; }
/// <summary>
/// Gets or sets the timestamp in UTC of the last updated metric in this namespace.
/// </summary>
[JsonProperty(PropertyName = "u")]
public DateTime UpdatedAtUtc
{
get
{
return this.updatedAtUtc;
}
set
{
// Metrics extension today ignores sub-seconds, so we need to work around it for now.
// We also always advance this property with the recent configuration cache refresh time
// to avoid missing configurations, even if no configuration was updated at all.
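// For example (illustrative value only): 2019-02-11T10:15:30.750Z is stored as 2019-02-11T10:15:30Z.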
this.updatedAtUtc = TruncateToIntegralSeconds(value);
}
}
/// <summary>
/// Gets or sets the array of metric configurations.
/// </summary>
[JsonProperty(PropertyName = "c")]
public HashSet<MetricConfigurationV2> MetricConfigurations { get; set; }
/// <summary>
/// Truncates to integral seconds.
/// </summary>
/// <param name="dateTime">The date time.</param>
/// <returns>
/// The truncated date time.
/// </returns>
private static DateTime TruncateToIntegralSeconds(DateTime dateTime)
{
return dateTime.AddTicks(-(dateTime.Ticks % TimeSpan.TicksPerSecond));
}
}
}
<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="Preaggregation.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Configuration
{
using System;
using System.Collections.Generic;
using System.Linq;
using Newtonsoft.Json;
/// <summary>
/// A grouping of dimensions used to aggregate metric data.
/// </summary>
public sealed class Preaggregation : IPreaggregation
{
private readonly List<string> dimensions;
private string name;
private IMinMaxConfiguration minMaxConfiguration;
private IPercentileConfiguration percentileConfiguration;
private IRollupConfiguration rollupConfiguration;
private IPublicationConfiguration publicationConfiguration;
private IDistinctCountConfiguration distinctCountConfiguration;
private IFilteringConfiguration filteringConfiguration;
/// <summary>
/// Initializes a new instance of the <see cref="Preaggregation"/> class.
/// Creates a new preaggregate.
/// </summary>
/// <param name="name">The name.</param>
/// <param name="dimensions">The dimensions.</param>
/// <param name="minMaxConfiguration">The minimum maximum configuration.</param>
/// <param name="percentileConfiguration">The percentile configuration.</param>
/// <param name="rollupConfiguration">The rollup configuration.</param>
/// <param name="publicationConfiguration">The publication configuration.</param>
/// <param name="distinctCountConfiguration">The distinct count configuration.</param>
/// <param name="filteringConfiguration">The filtering configuration.</param>
[JsonConstructor]
internal Preaggregation(
string name,
IEnumerable<string> dimensions,
MinMaxConfiguration minMaxConfiguration,
PercentileConfiguration percentileConfiguration,
RollupConfiguration rollupConfiguration,
PublicationConfiguration publicationConfiguration,
DistinctCountConfiguration distinctCountConfiguration,
IFilteringConfiguration filteringConfiguration)
{
this.Name = name;
this.dimensions = dimensions.ToList();
this.minMaxConfiguration = minMaxConfiguration;
this.percentileConfiguration = percentileConfiguration;
this.rollupConfiguration = rollupConfiguration;
this.publicationConfiguration = publicationConfiguration;
this.distinctCountConfiguration = distinctCountConfiguration;
this.filteringConfiguration = filteringConfiguration;
}
/// <summary>
/// Gets or sets the name of the preaggregate.
/// </summary>
public string Name
{
get
{
return this.name;
}
set
{
if (value == null)
{
throw new ArgumentNullException(nameof(value));
}
this.name = value;
}
}
/// <summary>
/// Gets the dimensions of the preaggregate in sorted order.
/// </summary>
public IEnumerable<string> Dimensions
{
get { return this.dimensions; }
}
/// <summary>
/// The min/max sampling type configuration.
/// </summary>
public IMinMaxConfiguration MinMaxConfiguration
{
get
{
return this.minMaxConfiguration;
}
set
{
if (value == null)
{
throw new ArgumentNullException(nameof(value));
}
this.minMaxConfiguration = value;
}
}
/// <summary>
/// The filtering type configuration.
/// </summary>
public IFilteringConfiguration FilteringConfiguration
{
get
{
return this.filteringConfiguration;
}
set
{
if (value == null)
{
throw new ArgumentNullException(nameof(value));
}
this.filteringConfiguration = value;
}
}
/// <summary>
/// The percentile sampling type configuration.
/// </summary>
public IPercentileConfiguration PercentileConfiguration
{
get
{
return this.percentileConfiguration;
}
set
{
if (value == null)
{
throw new ArgumentNullException(nameof(value));
}
this.percentileConfiguration = value;
}
}
/// <summary>
/// The data rollup configuration.
/// </summary>
public IRollupConfiguration RollupConfiguration
{
get
{
return this.rollupConfiguration;
}
set
{
if (value == null)
{
throw new ArgumentNullException(nameof(value));
}
this.rollupConfiguration = value;
}
}
/// <summary>
/// The metric data store configuration.
/// </summary>
public IPublicationConfiguration PublicationConfiguration
{
get
{
return this.publicationConfiguration;
}
set
{
if (value == null)
{
throw new ArgumentNullException(nameof(value));
}
this.publicationConfiguration = value;
}
}
/// <summary>
/// The distinct count sampling type configuration.
/// </summary>
public IDistinctCountConfiguration DistinctCountConfiguration
{
get
{
return this.distinctCountConfiguration;
}
set
{
if (value == null)
{
throw new ArgumentNullException(nameof(value));
}
this.distinctCountConfiguration = value;
}
}
/// <summary>
/// Creates a new preaggregate with defaults for the configuration flags.
/// </summary>
/// <param name="name">The name.</param>
/// <param name="dimensions">The dimensions.</param>
/// <returns>The created preaggregate.</returns>
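/// <example>
/// A minimal sketch; the preaggregate name and dimension names are illustrative placeholders:
/// <code>
/// var preaggregate = Preaggregation.CreatePreaggregation("Datacenter breakdown", new[] { "Datacenter", "Region" });
/// </code>
/// The result has min/max, percentile, rollup, and filtering disabled, a default distinct count configuration, and cache-server publication, as wired up in the implementation below.
/// </example>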
public static Preaggregation CreatePreaggregation(string name, IEnumerable<string> dimensions)
{
return CreatePreaggregationImpl(
name,
dimensions,
Configuration.MinMaxConfiguration.MinMaxDisabled,
Configuration.PercentileConfiguration.PercentileDisabled,
Configuration.RollupConfiguration.RollupDisabled,
Configuration.PublicationConfiguration.CacheServer,
new DistinctCountConfiguration(),
Configuration.FilteringConfiguration.FilteringDisabled);
}
/// <summary>
/// Creates a new filtered metrics store preaggregate with defaults for the configuration flags.
/// </summary>
/// <param name="name">The name.</param>
/// <param name="dimensions">The dimensions.</param>
/// <param name="minMaxConfiguration">The minimum maximum configuration. Default value is Configuration.MinMaxConfiguration.MinMaxDisabled.</param>
/// <param name="percentileConfiguration">The percentile configuration. Default value is Configuration.PercentileConfiguration.PercentileDisabled.</param>
/// <param name="rollupConfiguration">The rollup configuration. Default value is Configuration.RollupConfiguration.RollupDisabled.</param>
/// <param name="metricStoreConfiguration">The metric store configuration. Default value is Configuration.PublicationConfiguration.AggregatedMetricsStore.</param>
/// <param name="distinctCountConfiguration">The distinct count configuration. Default value is new DistinctCountConfiguration().</param>
/// <param name="filteringConfiguration">The filtering configuration. Default value is Configuration.FilteringConfiguration.FilteringDisabled.</param>
/// <returns>The created preaggregate.</returns>
public static Preaggregation CreatePreaggregationWithDefaults(
string name,
IEnumerable<string> dimensions,
MinMaxConfiguration minMaxConfiguration = null,
PercentileConfiguration percentileConfiguration = null,
RollupConfiguration rollupConfiguration = null,
PublicationConfiguration metricStoreConfiguration = null,
DistinctCountConfiguration distinctCountConfiguration = null,
IFilteringConfiguration filteringConfiguration = null)
{
return CreatePreaggregationImpl(
name,
dimensions,
minMaxConfiguration ?? Configuration.MinMaxConfiguration.MinMaxDisabled,
percentileConfiguration ?? Configuration.PercentileConfiguration.PercentileDisabled,
rollupConfiguration ?? Configuration.RollupConfiguration.RollupDisabled,
metricStoreConfiguration ?? (distinctCountConfiguration?.Dimensions.Any() == true ? Configuration.PublicationConfiguration.CacheServer : Configuration.PublicationConfiguration.MetricStore),
distinctCountConfiguration ?? new DistinctCountConfiguration(),
filteringConfiguration ?? Configuration.FilteringConfiguration.FilteringDisabled);
}
/// <summary>
/// Creates a new preaggregate.
/// </summary>
/// <remarks>
/// This is an older, legacy API kept for backward compatibility. Add new properties to CreatePreaggregationWithDefaults instead.
/// </remarks>
/// <param name="name">The name.</param>
/// <param name="dimensions">The dimensions.</param>
/// <param name="minMaxConfiguration">The minimum maximum configuration.</param>
/// <param name="percentileConfiguration">The percentile configuration.</param>
/// <param name="rollupConfiguration">The rollup configuration.</param>
/// <param name="metricStoreConfiguration">The metric store configuration.</param>
/// <param name="distinctCountConfiguration">The distinct count configuration.</param>
/// <returns>The created preaggregate.</returns>
[Obsolete("CreatePreaggregation is deprecated, please use CreatePreaggregationWithDefaults instead.")]
public static Preaggregation CreatePreaggregation(
string name,
IEnumerable<string> dimensions,
MinMaxConfiguration minMaxConfiguration,
PercentileConfiguration percentileConfiguration,
RollupConfiguration rollupConfiguration,
PublicationConfiguration metricStoreConfiguration,
DistinctCountConfiguration distinctCountConfiguration)
{
return CreatePreaggregationImpl(
name,
dimensions,
minMaxConfiguration,
percentileConfiguration,
rollupConfiguration,
metricStoreConfiguration,
distinctCountConfiguration,
Configuration.FilteringConfiguration.FilteringDisabled);
}
/// <summary>
/// Adds the dimension to the preaggregate.
/// </summary>
/// <param name="dimensionToAdd">Name of the dimension to add.</param>
public void AddDimension(string dimensionToAdd)
{
if (string.IsNullOrWhiteSpace(dimensionToAdd))
{
throw new ArgumentNullException(nameof(dimensionToAdd));
}
var index = 0;
for (; index < this.dimensions.Count; ++index)
{
var comparison = string.Compare(this.dimensions[index], dimensionToAdd, StringComparison.OrdinalIgnoreCase);
if (comparison == 0)
{
throw new ConfigurationValidationException("Cannot add duplicate dimensions.", ValidationType.DuplicateDimension);
}
if (comparison > 0)
{
break;
}
}
// Append only when no larger dimension was found; otherwise insert before the first larger dimension so the list stays sorted.
if (index == this.dimensions.Count)
{
this.dimensions.Add(dimensionToAdd);
}
else
{
this.dimensions.Insert(index, dimensionToAdd);
}
}
/// <summary>
/// Removes the dimension from the preaggregate.
/// </summary>
/// <param name="dimensionName">Name of the dimension.</param>
public void RemoveDimension(string dimensionName)
{
this.dimensions.RemoveAll(x => string.Equals(x, dimensionName, StringComparison.OrdinalIgnoreCase));
}
/// <summary>
/// Creates a new preaggregate.
/// </summary>
/// <param name="name">The name.</param>
/// <param name="dimensions">The dimensions.</param>
/// <param name="minMaxConfiguration">The minimum maximum configuration.</param>
/// <param name="percentileConfiguration">The percentile configuration.</param>
/// <param name="rollupConfiguration">The rollup configuration.</param>
/// <param name="metricStoreConfiguration">The metric store configuration.</param>
/// <param name="distinctCountConfiguration">The distinct count configuration.</param>
/// <param name="filteringConfiguration">The filtering configuration.</param>
/// <returns>The created preaggregate.</returns>
private static Preaggregation CreatePreaggregationImpl(
string name,
IEnumerable<string> dimensions,
MinMaxConfiguration minMaxConfiguration,
PercentileConfiguration percentileConfiguration,
RollupConfiguration rollupConfiguration,
PublicationConfiguration metricStoreConfiguration,
DistinctCountConfiguration distinctCountConfiguration,
IFilteringConfiguration filteringConfiguration)
{
if (string.IsNullOrWhiteSpace(name))
{
throw new ArgumentNullException(nameof(name));
}
if (dimensions == null)
{
throw new ArgumentNullException(nameof(dimensions));
}
var dimensionList = dimensions.ToList();
dimensionList.Sort(StringComparer.OrdinalIgnoreCase);
for (var i = 0; i < (dimensionList.Count - 1); i++)
{
if (string.Equals(dimensionList[i], dimensionList[i + 1], StringComparison.OrdinalIgnoreCase))
{
throw new ArgumentException("Cannot create a preaggregate with duplicate dimensions.");
}
}
if (minMaxConfiguration == null)
{
throw new ArgumentNullException(nameof(minMaxConfiguration));
}
if (percentileConfiguration == null)
{
throw new ArgumentNullException(nameof(percentileConfiguration));
}
if (rollupConfiguration == null)
{
throw new ArgumentNullException(nameof(rollupConfiguration));
}
if (metricStoreConfiguration == null)
{
throw new ArgumentNullException(nameof(metricStoreConfiguration));
}
if (distinctCountConfiguration == null)
{
throw new ArgumentNullException(nameof(distinctCountConfiguration));
}
if (filteringConfiguration == null)
{
throw new ArgumentNullException(nameof(filteringConfiguration));
}
return new Preaggregation(
name,
dimensionList,
minMaxConfiguration,
percentileConfiguration,
rollupConfiguration,
metricStoreConfiguration,
distinctCountConfiguration,
filteringConfiguration);
}
}
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="UnixEpochHelper.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Utility
{
using System;
/// <summary>
/// Methods and values related to time.
/// </summary>
/// <remarks>
/// The formal definition of unix time is the # of seconds that have elapsed since UTC
/// DateTime(1970, 1, 1, 0, 0, 0). The metrics system refers to unix time as the number of milliseconds
/// that have elapsed since UTC DateTime(1970, 1, 1, 0, 0, 0).
/// </remarks>
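/// <example>
/// A round-trip sketch of the millisecond convention described in the remarks (illustrative; values depend on the clock):
/// <code>
/// DateTime utcNow = DateTime.UtcNow;
/// long unixMillis = UnixEpochHelper.GetMillis(utcNow);
/// DateTime roundTripped = UnixEpochHelper.FromMillis(unixMillis);
/// // roundTripped equals utcNow truncated to whole milliseconds, with DateTimeKind.Utc.
/// </code>
/// </example>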
internal static class UnixEpochHelper
{
/// <summary>
/// The number of ticks in a millisecond.
/// </summary>
internal const long TicksPerMillisecond = 10000;
/// <summary>
/// The Unix epoch (January 1, 1970)
/// </summary>
internal static readonly DateTime UnixEpoch = new DateTime(1970, 1, 1, 0, 0, 0);
/// <summary>
/// The milliseconds between the Unix epoch (January 1, 1970) and the Microsoft epoch (January 1, 0001).
/// </summary>
private static readonly long UnixEpochMsEpochDeltaMillis = UnixEpoch.Ticks / TicksPerMillisecond;
/// <summary>
/// Gets the Unix time for the given <paramref name="utcTime"/>.
/// </summary>
/// <param name="utcTime">A given UTC time</param>
/// <returns>The Unix time for the given <paramref name="utcTime"/></returns>
internal static long GetMillis(DateTime utcTime)
{
return (utcTime.Ticks / TicksPerMillisecond) - UnixEpochMsEpochDeltaMillis;
}
/// <summary>
/// Gets the DateTime given the Unix time
/// </summary>
/// <param name="millis">The Unix time</param>
/// <returns>The DateTime for the Unix time</returns>
internal static DateTime FromMillis(long millis)
{
return new DateTime(TicksPerMillisecond * (millis + UnixEpochMsEpochDeltaMillis), DateTimeKind.Utc);
}
}
}
<file_sep>// <copyright file="PreaggregateDimensionFilterValues.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// ReSharper disable once CheckNamespace
namespace Microsoft.Cloud.Metrics.Client.PreaggregateFiltersManagement
{
using System;
using System.Collections.Generic;
using System.Linq;
using Newtonsoft.Json;
/// <summary>
/// Represents filter values for a single preaggregate dimension name.
/// </summary>
[JsonObject]
internal sealed class PreaggregateDimensionFilterValues
{
/// <summary>
/// Initializes a new instance of the <see cref="PreaggregateDimensionFilterValues"/> class.
/// </summary>
/// <param name="filterDimensionName">Name of the filter dimension.</param>
/// <param name="filterValues">The filter values.</param>
[JsonConstructor]
public PreaggregateDimensionFilterValues(string filterDimensionName, IReadOnlyList<string> filterValues)
{
if (string.IsNullOrEmpty(filterDimensionName))
{
throw new ArgumentNullException(nameof(filterDimensionName));
}
if (filterValues == null)
{
throw new ArgumentNullException(nameof(filterValues));
}
foreach (var value in filterValues)
{
if (string.IsNullOrEmpty(value))
{
throw new ArgumentException($"{nameof(filterValues)} cannot have empty of null values");
}
}
this.FilterDimensionName = filterDimensionName;
this.FilterValues = new HashSet<string>(filterValues, StringComparer.OrdinalIgnoreCase);
}
/// <summary>
/// Gets the name of the filter dimension.
/// </summary>
public string FilterDimensionName { get; }
/// <summary>
/// Gets the filter values.
/// </summary>
public ISet<string> FilterValues { get; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Cryptography.X509Certificates;
using System.Text;
using System.Threading.Tasks;
using Microsoft.Cloud.Metrics.Client;
using Microsoft.Cloud.Metrics.Client.Metrics;
using Microsoft.Cloud.Metrics.Client.Query;
using Microsoft.Online.Metrics.Serialization.Configuration;
namespace MetricsMonitor
{
public class MdmClient
{
public static readonly string MessageCount = "MessageCount";
public static readonly string PodConnectionCount = "PodConnectionCount";
private readonly X509Certificate2 _certificate;
private readonly MdmEnvironment _mdmEnvironment = MdmEnvironment.Production;
private readonly string _namespace = "Shoebox";
private readonly string _namespace2 = "ShoeboxInternal";
private readonly string _accountPrefix = "MicrosoftSignalRServiceShoebox";
private string _account;
public MdmClient(string certPath, string passwd, string region)
{
_certificate = new X509Certificate2(certPath, passwd);
_account = $"{_accountPrefix}{region}";
}
public async Task<IDictionary<string, double>> GetDimensionCountMetricsAsync(string metricName, TimeSpan backTime, string dimension)
{
if (MessageCount.Equals(metricName))
{
return await GetDimensionMessageCountMetricsAsync(backTime, dimension);
}
else if (PodConnectionCount.Equals(metricName))
{
return await GetDimensionPodConnectionCountMetricsAsync(backTime, dimension);
}
return null;
}
public async Task<IDictionary<string, double>> GetDimensionPodConnectionCountMetricsAsync(TimeSpan backTime, string dimension)
{
return await GetDimensionMetricsCountCoreAsync(PodConnectionCount, backTime, dimension);
}
public async Task<IDictionary<string, double>> GetDimensionMessageCountMetricsAsync(TimeSpan backTime, string dimension)
{
return await GetDimensionMetricsCountCoreAsync(MessageCount, backTime, dimension);
}
public static bool IsShoeboxNamespace(string metricName)
{
switch (metricName)
{
case "MessageCount":
case "ConnectionCount":
case "InboundTraffic":
case "OutboundTraffic":
return true;
case "PodConnectionCount":
case "RedisPubCount":
case "TotalDelta":
case "ConnectionCountRaw":
return false;
}
return false;
}
public MetricIdentifier GenMetricId(string metricName)
{
if (IsShoeboxNamespace(metricName))
{
return new MetricIdentifier(_account, _namespace, metricName);
}
else
{
return new MetricIdentifier(_account, _namespace2, metricName);
}
}
public async Task<IDictionary<string, double>> GetDimensionMetricsCountCoreAsync(string metricsName, TimeSpan backTime, string dimension)
{
IDictionary<string, double> result = new Dictionary<string, double>();
var connectionInfo = new ConnectionInfo(_certificate, _mdmEnvironment);
var metricReader = new MetricReader(connectionInfo);
var metricId = GenMetricId(metricsName);
var dimensionFilters = new List<DimensionFilter>
{
DimensionFilter.CreateIncludeFilter(dimension)
};
try
{
var now = DateTime.UtcNow;
var results = await metricReader.GetTimeSeriesAsync(
metricId,
dimensionFilters,
now - backTime,
now,
new[] { SamplingType.Count },
new SelectionClauseV3(new PropertyDefinition(PropertyAggregationType.Sum, SamplingType.Count), 100_000, OrderBy.Descending),
outputDimensionNames: new List<string> { dimension }
);
return results.Results.ToDictionary(r => r.DimensionList.First().Value, r => r.EvaluatedResult);
}
catch (Exception ex)
{
Console.WriteLine("Failed to get metrics data from Geneva", ex);
return null;
}
}
}
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="HttpClientHelper.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Utility
{
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Net.Security;
using System.Reflection;
using System.Security.Cryptography.X509Certificates;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Logging;
using Newtonsoft.Json;
using Online.Metrics.Serialization;
/// <summary>
/// The http client helper class.
/// </summary>
public static class HttpClientHelper
{
/// <summary>
/// The HTTP status code for too many requests.
/// </summary>
/// <remarks>
/// Refer: http://tools.ietf.org/html/rfc6585.
/// </remarks>
public const int HttpStatusCodeThrottled = 429;
/// <summary>
/// The white listed server subject names.
/// </summary>
public static readonly HashSet<string> WhiteListedServerSubjectNames = new HashSet<string>(new[] { "CN=*.dc.ad.msft.net", "CN=*.test.dc.ad.msft.net", "CN=*.ff.dc.ad.msft.net", "CN=*.test.ff.dc.ad.msft.net", "CN=*.cn.dc.ad.msft.net, O=Shanghai Blue Cloud Technology Co. Ltd, L=Shanghai, C=CN" });
/// <summary>
/// The default maximum web requests per minute
/// </summary>
private const int DefaultMaxWebRequestsPerMinute = 1000;
/// <summary>
/// The throttled identity http header name.
/// </summary>
private const string ThrottledIdentityKey = "Throttled-Identity";
/// <summary>
/// The retry after http header name.
/// </summary>
private const string RetryAfterKey = "Retry-After";
/// <summary>
/// The version of the current metrics client assembly
/// </summary>
private static readonly string AssemblyVersion = Assembly.GetExecutingAssembly().GetName().Version.ToString();
/// <summary>
/// The log identifier for this class
/// </summary>
private static readonly object LogId = Logger.CreateCustomLogId("HttpClientHelper");
/// <summary>
/// The identities throttled by server.
/// Keys are monitoring accounts.
/// Values are throttled identities to DateTime to proceed sending requests.
/// </summary>
private static readonly ConcurrentDictionary<string, ConcurrentDictionary<string, DateTime>> ServerThrottledIdentities;
/// <summary>
/// The current minute of the hour
/// </summary>
private static volatile int currentMinute = DateTime.UtcNow.Minute;
/// <summary>
/// The requests sent in current minute
/// </summary>
private static int requestsSentInCurrentMinute;
/// <summary>
/// Initializes static members of the <see cref="HttpClientHelper"/> class.
/// </summary>
static HttpClientHelper()
{
EnableHttpPipelining = false;
ServerThrottledIdentities = new ConcurrentDictionary<string, ConcurrentDictionary<string, DateTime>>();
}
/// <summary>
/// Gets or sets the max web requests per minute.
/// </summary>
public static ushort MaxWebRequestsPerMinute { get; set; }
/// <summary>
/// Gets or sets a value indicating whether to skip setting the certificate validation callback so that consumers can control certificate validation themselves.
/// </summary>
public static bool DoNotSetCertificateValidationCallback { get; set; }
/// <summary>
/// Gets or sets a value indicating whether HTTP pipelining should be enabled for HTTP(S) requests.
///
/// This is disabled by default.
/// </summary>
public static bool EnableHttpPipelining { get; set; }
/// <summary>
/// Gets the authentication header
/// </summary>
/// <returns>The authentication header</returns>
public static AuthenticationHeaderValue GetAuthenticationHeader()
{
return new AuthenticationHeaderValue(UserAccessTokenRefresher.BearerTokenAuthScheme, UserAccessTokenRefresher.Instance.UserAccessToken);
}
/// <summary>
/// Creates the HTTP client with user or cert authentication.
/// </summary>
/// <param name="connectionInfo">The connection information.</param>
/// <param name="authHeaderValue">The authentication header value.</param>
/// <returns>An instance of <see cref="HttpClient"/></returns>
public static HttpClient CreateHttpClientWithAuthInfo(ConnectionInfo connectionInfo, string authHeaderValue = null)
{
HttpClient httpClient;
var webRequestHandler = new HttpClientHandler { AllowAutoRedirect = false, };
if (!DoNotSetCertificateValidationCallback)
{
webRequestHandler.ServerCertificateCustomValidationCallback = CertificateValidationCallback;
}
if (connectionInfo.UseAadUserAuthentication)
{
httpClient = new HttpClient(webRequestHandler, disposeHandler: true);
AuthenticationHeaderValue authHeader;
if (authHeaderValue == null)
{
authHeader = GetAuthenticationHeader();
}
else
{
authHeader = new AuthenticationHeaderValue(UserAccessTokenRefresher.BearerTokenAuthScheme, authHeaderValue);
}
httpClient.DefaultRequestHeaders.Authorization = authHeader;
}
else
{
webRequestHandler.ClientCertificates.Add(connectionInfo.Certificate);
httpClient = new HttpClient(webRequestHandler, disposeHandler: true);
}
httpClient.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json"));
httpClient.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue(SerializationConstants.OctetStreamContentType));
httpClient.DefaultRequestHeaders.UserAgent.ParseAdd("MultiDimensionalMetricsClient");
httpClient.DefaultRequestHeaders.Add("MultiDimensionalMetricsClientVersion", AssemblyVersion);
if (connectionInfo.AdditionalDefaultRequestHeaders != null)
{
foreach (var kvp in connectionInfo.AdditionalDefaultRequestHeaders)
{
httpClient.DefaultRequestHeaders.Add(kvp.Key, kvp.Value);
}
}
httpClient.Timeout = connectionInfo.Timeout;
Logger.Log(
LoggerLevel.Info,
LogId,
"CreateClient",
"Created new HttpClient. CertThumbprint:{0}, TimeoutMs:{1}",
connectionInfo.CertificateThumbprint,
connectionInfo.Timeout.TotalMilliseconds);
return httpClient;
}
/// <summary>
/// Creates the HTTP client
/// </summary>
/// <param name="timeout">The timeout to apply to the requests.</param>
/// <returns>
/// An instance of <see cref="HttpClient" />
/// </returns>
public static HttpClient CreateHttpClient(TimeSpan timeout)
{
var webRequestHandler = new HttpClientHandler { AllowAutoRedirect = false };
if (!DoNotSetCertificateValidationCallback)
{
webRequestHandler.ServerCertificateCustomValidationCallback = CertificateValidationCallback;
}
HttpClient httpClient = new HttpClient(webRequestHandler, disposeHandler: true);
httpClient.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json"));
httpClient.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue(SerializationConstants.OctetStreamContentType));
httpClient.Timeout = timeout;
Logger.Log(
LoggerLevel.Info,
LogId,
"CreateClient",
"Created new HttpClient. TimeoutMs:{0}",
timeout.TotalMilliseconds);
return httpClient;
}
/// <summary>
/// Callback when a server side certificate is validated.
/// </summary>
/// <param name="sender">The sender.</param>
/// <param name="certificate">The certificate.</param>
/// <param name="chain">The chain.</param>
/// <param name="sslPolicyErrors">The SSL policy errors.</param>
/// <returns>True if no errors found while validation.</returns>
public static bool CertificateValidationCallback(
object sender,
X509Certificate certificate,
X509Chain chain,
SslPolicyErrors sslPolicyErrors)
{
// MDM is using 3rd party domains, metrics.nsatc.net, to reduce the dependency on MSFT infrastructure, so we cannot obtain a certificate matching the domains.
// We will consider moving to MSFT domains but for now we customize the validation on cert subject name for requests toward MDM stamp hostnames or IPs.
return sslPolicyErrors == SslPolicyErrors.None
|| (sslPolicyErrors == SslPolicyErrors.RemoteCertificateNameMismatch && WhiteListedServerSubjectNames.Contains(certificate.Subject));
}
/// <summary>
/// Adds the standard headers to message.
/// </summary>
/// <param name="message">The message.</param>
/// <param name="traceId">The trace identifier that will be placed in a header.</param>
/// <param name="sourceIdentity">The source identity that will be placed in a header.</param>
/// <param name="hostname">The host name of an endpoint.</param>
public static void AddStandardHeadersToMessage(HttpRequestMessage message, Guid traceId, string sourceIdentity, string hostname)
{
message.Headers.Add(SerializationConstants.TraceIdHeader, traceId.ToString("B"));
message.Headers.Add("SourceIdentity", sourceIdentity);
message.Headers.Host = hostname;
}
/// <summary>
/// Gets the response as string.
/// </summary>
/// <param name="url">The URL.</param>
/// <param name="method">The http method.</param>
/// <param name="client">The HTTP client.</param>
/// <param name="monitoringAccount">Monitoring account.</param>
/// <param name="operation">The operation.</param>
/// <param name="httpContent">Content of the HTTP request.</param>
/// <param name="clientId">Optional parameter identifying client.</param>
/// <param name="serializedContent">Serialized content of the HTTP request, if special serialization is needed.</param>
/// <param name="traceId">The trace identifier.</param>
/// <param name="serializationVersion">The serialization version.</param>
/// <param name="numAttempts">The number of attempts.</param>
/// <returns>
/// The HTTP response message as a string.
/// </returns>
/// <remarks>
/// We attempt up to 3 times with delay of 5 seconds and 10 seconds in between respectively, if the request cannot be sent or the response status code is 503.
/// However, we don't want to retry in the OBO case.
/// </remarks>
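/// <example>
/// A hedged call sketch; the endpoint URI, monitoring account, and operation name are placeholders rather than real values:
/// <code>
/// HttpClient client = HttpClientHelper.CreateHttpClient(TimeSpan.FromSeconds(30));
/// string responseBody = await HttpClientHelper.GetResponseAsStringAsync(
/// new Uri("https://example-stamp.metrics.example.net/api/someOperation"),
/// HttpMethod.Get,
/// client,
/// monitoringAccount: "MyMonitoringAccount",
/// operation: "someOperation");
/// </code>
/// Transient failures (send failures or 503 responses) are retried up to <c>numAttempts</c> times with growing delays, as described in the remarks.
/// </example>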
public static async Task<string> GetResponseAsStringAsync(
Uri url,
HttpMethod method,
HttpClient client,
string monitoringAccount,
string operation,
object httpContent = null,
string clientId = "",
string serializedContent = null,
Guid? traceId = null,
byte serializationVersion = MetricQueryResponseDeserializer.CurrentVersion,
int numAttempts = 3)
{
Tuple<string, HttpResponseMessage> response = null;
try
{
response = await GetResponse(
url,
method,
client,
monitoringAccount,
operation,
httpContent,
clientId,
serializedContent,
traceId,
serializationVersion,
numAttempts)
.ConfigureAwait(false);
return response.Item1;
}
finally
{
response?.Item2?.Dispose();
}
}
/// <summary>
/// Gets the HTTP response message as a string.
/// </summary>
/// <param name="url">The URL.</param>
/// <param name="method">The http method.</param>
/// <param name="client">The HTTP client.</param>
/// <param name="monitoringAccount">Monitoring account.</param>
/// <param name="operation">The operation.</param>
/// <param name="httpContent">Content of the HTTP request.</param>
/// <param name="clientId">Optional parameter identifying client.</param>
/// <param name="serializedContent">Serialized content of the HTTP request, if special serialization is needed.</param>
/// <param name="traceId">The trace identifier.</param>
/// <param name="serializationVersion">The serialization version.</param>
/// <param name="numAttempts">The number of attempts.</param>
/// <returns>
/// The HTTP response message as a string.
/// </returns>
/// <remarks>
/// We attempt up to 3 times with delay of 5 seconds and 10 seconds in between respectively, if the request cannot be sent or the response status code is 503.
/// However, we don't want to retry in the OBO case.
/// </remarks>
public static async Task<Tuple<string, HttpResponseMessage>> GetResponse(
Uri url,
HttpMethod method,
HttpClient client,
string monitoringAccount,
string operation,
object httpContent = null,
string clientId = "",
string serializedContent = null,
Guid? traceId = null,
byte serializationVersion = MetricQueryResponseDeserializer.CurrentVersion,
int numAttempts = 3)
{
const int baseWaitTimeInSeconds = 5;
Exception lastException = null;
if (IsThrottledByServer(monitoringAccount, operation, method))
{
throw new MetricsClientException(
$"The request is throttled by the server. Url:{url}, Method:{method}, Operation:{operation}.",
null,
traceId ?? Guid.Empty,
(HttpStatusCode)HttpStatusCodeThrottled);
}
var stopWatch = Stopwatch.StartNew();
for (int i = 1; i <= numAttempts; i++)
{
try
{
return await GetResponseWithTokenRefresh(url, method, client, httpContent, clientId, serializedContent, traceId, serializationVersion, monitoringAccount).ConfigureAwait(false);
}
catch (MetricsClientException e)
{
lastException = e;
if (stopWatch.Elapsed >= client.Timeout ||
(e.ResponseStatusCode != null && e.ResponseStatusCode != HttpStatusCode.ServiceUnavailable) ||
i == numAttempts)
{
throw;
}
var delay = TimeSpan.FromSeconds(baseWaitTimeInSeconds * i);
Logger.Log(LoggerLevel.Info, LogId, "GetResponse", "Delay {0} and then retry.", delay);
await Task.Delay(delay).ConfigureAwait(false);
}
}
throw new MetricsClientException($"Exhausted {numAttempts} attempts.", lastException);
}
private static async Task<Tuple<string, HttpResponseMessage>> GetResponseWithTokenRefresh(
Uri url,
HttpMethod method,
HttpClient client,
object httpContent,
string clientId,
string serializedContent,
Guid? traceId,
byte serializationVersion,
string monitoringAccount)
{
try
{
return await GetResponseNoRetry(url, method, client, httpContent, clientId, serializedContent, traceId, serializationVersion, monitoringAccount).ConfigureAwait(false);
}
catch (MetricsClientException e)
{
if (e.ResponseStatusCode == HttpStatusCode.Redirect)
{
await UserAccessTokenRefresher.Instance.RefreshAccessToken().ConfigureAwait(false);
client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(UserAccessTokenRefresher.BearerTokenAuthScheme, UserAccessTokenRefresher.Instance.UserAccessToken);
return await GetResponseNoRetry(url, method, client, httpContent, clientId, serializedContent, traceId, serializationVersion, monitoringAccount).ConfigureAwait(false);
}
throw;
}
}
private static async Task<Tuple<string, HttpResponseMessage>> GetResponseNoRetry(
Uri url,
HttpMethod method,
HttpClient client,
object httpContent,
string clientId,
string serializedContent,
Guid? traceId,
byte serializationVersion,
string monitoringAccount)
{
const string LogTag = "GetResponse";
traceId = traceId ?? Guid.NewGuid();
HandleGeneralClientThrottling(url, method, traceId.Value);
var ipUrlBuilder = new UriBuilder(url)
{
Host = (await ConnectionInfo.GetCachedIpAddress(url).ConfigureAwait(false)).Host
};
var request = new HttpRequestMessage(method, ipUrlBuilder.Uri);
var sourceId = Environment.MachineName;
AddStandardHeadersToMessage(request, traceId.Value, sourceId, url.Host);
if (!string.IsNullOrWhiteSpace(clientId))
{
request.Headers.Add(SerializationConstants.ClientIdHeader, clientId);
}
if (serializationVersion == 1)
{
// We won't use this header for new versions of serialization.
request.Headers.Add(SerializationConstants.ScalingFactorDisabledHeader, "true");
}
if (httpContent != null && serializedContent == null)
{
serializedContent = JsonConvert.SerializeObject(httpContent);
}
if (serializedContent != null)
{
request.Content = new StringContent(serializedContent, Encoding.UTF8, "application/json");
}
Logger.Log(
LoggerLevel.Info,
LogId,
LogTag,
"Making HTTP request. TraceId:{0}, Url:{1}, Method:{2}, SourceId:{3}, ContentLength:{4}, DnsTimeoutMs:{5}, TimeoutMs:{6}, SdkVersion:{7}",
traceId,
url,
method,
sourceId,
serializedContent?.Length ?? 0,
ServicePointManager.DnsRefreshTimeout,
client.Timeout.TotalMilliseconds,
AssemblyVersion);
string responseString = null;
var requestLatency = Stopwatch.StartNew();
var stage = "SendRequest";
var handlingServer = "Unknown";
HttpResponseMessage response = null;
try
{
response = await client.SendAsync(request).ConfigureAwait(false);
Logger.Log(
LoggerLevel.Info,
LogId,
LogTag,
"Sent HTTP request, reading response. TraceId:{0}, Url:{1}, SendLatencyMs:{2}",
traceId,
url,
requestLatency.ElapsedMilliseconds);
stage = "ReadResponse";
IEnumerable<string> handlingServerValues;
response.Headers.TryGetValues("__HandlingServerId__", out handlingServerValues);
if (handlingServerValues != null)
{
handlingServer = handlingServerValues.First();
}
requestLatency.Restart();
if (response.Content.Headers.ContentType?.MediaType != null
&& response.Content.Headers.ContentType.MediaType.Equals(SerializationConstants.OctetStreamContentType, StringComparison.OrdinalIgnoreCase))
{
responseString = SerializationConstants.OctetStreamContentType;
}
else
{
responseString = await response.Content.ReadAsStringAsync().ConfigureAwait(false);
}
Logger.Log(
LoggerLevel.Info,
LogId,
LogTag,
"Received HTTP response. TraceId:{0}, Url:{1}, HandlingServer:{2}, ReadLatencyMs:{3}, ResponseStatus:{4}, ResponseLength:{5}",
traceId,
url,
handlingServer,
requestLatency.ElapsedMilliseconds,
response.StatusCode,
responseString?.Length ?? 0);
stage = "ValidateStatus";
response.EnsureSuccessStatusCode();
return Tuple.Create(responseString, response);
}
catch (Exception e)
{
var message = HandleServerSideThrottling(response, url, method, monitoringAccount) ?
$"HTTP request throttled by server. Url:{url}, Method:{method}, Response:{responseString ?? "<none>"}" :
$"Failed to get a response from the server. TraceId:{traceId.Value.ToString("B")}, Url:{request.RequestUri}, HandlingServer:{handlingServer} Stage:{stage}, "
+
$"LatencyMs:{requestLatency.ElapsedMilliseconds}, ResponseStatus:{response?.StatusCode.ToString() ?? "<none>"}, Response:{responseString}.";
Logger.Log(LoggerLevel.Error, LogId, LogTag, message);
throw new MetricsClientException(message, e, traceId.Value, response?.StatusCode);
}
finally
{
requestLatency.Stop();
}
}
private static bool HandleServerSideThrottling(
HttpResponseMessage response,
Uri url,
HttpMethod method,
string monitoringAccount)
{
if (response != null &&
(int)response.StatusCode == HttpStatusCodeThrottled &&
response.Headers != null)
{
if (!response.Headers.Contains(ThrottledIdentityKey) || !response.Headers.Contains(RetryAfterKey))
{
return false;
}
string throttledIdentity = response.Headers.GetValues(ThrottledIdentityKey).FirstOrDefault();
if (string.IsNullOrEmpty(throttledIdentity))
{
return false;
}
string retryAfter = response.Headers.GetValues(RetryAfterKey).FirstOrDefault();
if (string.IsNullOrEmpty(retryAfter))
{
return false;
}
int retryAfterSeconds;
if (int.TryParse(retryAfter, out retryAfterSeconds))
{
DateTime now = DateTime.UtcNow;
ServerThrottledIdentities.AddOrUpdate(
monitoringAccount,
key =>
{
var value = new ConcurrentDictionary<string, DateTime>();
value.TryAdd(throttledIdentity, now.AddSeconds(retryAfterSeconds));
return value;
},
(key, value) =>
{
value.AddOrUpdate(
throttledIdentity,
keyInner => now.AddSeconds(retryAfterSeconds),
(keyInner, valueInner) => now.AddSeconds(retryAfterSeconds));
return value;
});
return true;
}
Logger.Log(
LoggerLevel.Debug,
LogId,
"HandleServerSideThrottling",
"HTTP request throttled by server, but we could not parse the Retry-After header. Url:{0}, Method:{1}, Retry-After:{2}",
url,
method,
retryAfter);
}
return false;
}
private static bool IsThrottledByServer(string monitoringAccount, string operation, HttpMethod httpMethod)
{
if (string.IsNullOrEmpty(monitoringAccount))
{
return false;
}
ConcurrentDictionary<string, DateTime> throttingIdentityToTimeToProceed;
if (!ServerThrottledIdentities.TryGetValue(monitoringAccount, out throttingIdentityToTimeToProceed))
{
return false;
}
DateTime timeToProceedByOperation;
throttingIdentityToTimeToProceed.TryGetValue(operation, out timeToProceedByOperation);
DateTime timeToProceedByHttpMethod;
throttingIdentityToTimeToProceed.TryGetValue(httpMethod.ToString(), out timeToProceedByHttpMethod);
DateTime timeToProceed = new DateTime(Math.Max(timeToProceedByOperation.Ticks, timeToProceedByHttpMethod.Ticks));
if (timeToProceed <= DateTime.UtcNow)
{
ConcurrentDictionary<string, DateTime> dummy;
ServerThrottledIdentities.TryRemove(monitoringAccount, out dummy);
return false;
}
return true;
}
private static void HandleGeneralClientThrottling(
Uri url,
HttpMethod method,
Guid traceId)
{
var minute = DateTime.UtcNow.Minute;
if (minute == currentMinute)
{
Interlocked.Increment(ref requestsSentInCurrentMinute);
}
else
{
currentMinute = minute;
Interlocked.Exchange(ref requestsSentInCurrentMinute, 0);
}
var effectiveMaxWebRequestsPerMinute = Math.Max(DefaultMaxWebRequestsPerMinute, (int)MaxWebRequestsPerMinute);
if (requestsSentInCurrentMinute > effectiveMaxWebRequestsPerMinute)
{
Logger.Log(
LoggerLevel.Debug,
LogId,
"HandleGeneralClientThrottling",
"HTTP request throttled. Url:{0}, Method:{2}, CurrentRequestsInMinute:{3}, AllowedRequestsInMinute:{4}",
url,
method,
requestsSentInCurrentMinute,
effectiveMaxWebRequestsPerMinute);
throw new MetricsClientException(
$"Client size throttling: no more than [{effectiveMaxWebRequestsPerMinute}] requests can be issued in a minute.",
null,
traceId,
(HttpStatusCode)HttpStatusCodeThrottled);
}
}
}
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="TraceEventHeader.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// <summary>
// Type that wraps access to the header fields of an ETW event record.
// </summary>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Metrics.Etw
{
using System;
using System.Diagnostics.CodeAnalysis;
/// <summary>
/// Type that wraps access to the header fields of an ETW event record.
/// </summary>
internal unsafe struct TraceEventHeader
{
/// <summary>
/// Pointer to the native structure being wrapped.
/// </summary>
[SuppressMessage("Microsoft.Security", "CA2151:FieldsWithCriticalTypesShouldBeCriticalFxCopRule", Justification = "Not accessible to any 3rd-party MS or not")]
private readonly NativeMethods.EventHeader* eventHeader;
/// <summary>
/// Initializes a new instance of the <see cref="TraceEventHeader"/> struct.
/// </summary>
/// <param name="eventHeader">
/// Pointer to the native structure being wrapped by the instance.
/// </param>
public TraceEventHeader(NativeMethods.EventHeader* eventHeader)
{
if (eventHeader == null)
{
throw new ArgumentNullException("eventHeader");
}
this.eventHeader = eventHeader;
}
/// <summary>
/// Gets the size of the event record, in bytes.
/// </summary>
public ushort Size
{
get
{
return this.eventHeader->Size;
}
}
/// <summary>
/// Gets the header type (reserved).
/// </summary>
public ushort HeaderType
{
get
{
return this.eventHeader->HeaderType;
}
}
/// <summary>
/// Gets the flags that provide information about the event, such as the type of
/// session it was logged to and whether the event contains extended data.
/// </summary>
[SuppressMessage("Microsoft.Naming", "CA1702:CompoundWordsShouldBeCasedCorrectly", MessageId = "Opcode", Justification = "As commonly used in ETW")]
public ushort Flags
{
get
{
return this.eventHeader->Flags;
}
}
/// <summary>
/// Gets the type of source to use for parsing the event data.
/// </summary>
public ushort EventProperty
{
get
{
return this.eventHeader->EventProperty;
}
}
/// <summary>
/// Gets the thread that generated the event.
/// </summary>
public int ThreadId
{
get
{
return this.eventHeader->ThreadId;
}
}
/// <summary>
/// Gets the process that generated the event.
/// </summary>
public int ProcessId
{
get
{
return this.eventHeader->ProcessId;
}
}
/// <summary>
/// Gets the time that the event occurred. The resolution depends on the value
/// of the <c>Wnode.ClientContext</c> member of <c>EVENT_TRACE_PROPERTIES</c> at the time the
/// controller created the session.
/// </summary>
public long Timestamp
{
get
{
return this.eventHeader->TimeStamp;
}
}
/// <summary>
/// Gets the GUID that uniquely identifies the provider that logged the event.
/// </summary>
public Guid ProviderId
{
get
{
return this.eventHeader->ProviderId;
}
}
/// <summary>
/// Gets the Id of the event.
/// </summary>
public ushort Id
{
get
{
return this.eventHeader->Id;
}
}
/// <summary>
/// Gets the version of the event.
/// </summary>
public byte Version
{
get
{
return this.eventHeader->Version;
}
}
/// <summary>
/// Gets the channel of the event.
/// </summary>
public byte Channel
{
get
{
return this.eventHeader->Channel;
}
}
/// <summary>
/// Gets the level of the event.
/// </summary>
public byte Level
{
get
{
return this.eventHeader->Level;
}
}
/// <summary>
/// Gets the <c>opcode</c> of the event.
/// </summary>
[SuppressMessage("Microsoft.Naming", "CA1702:CompoundWordsShouldBeCasedCorrectly", MessageId = "Opcode", Justification = "As commonly used in ETW")]
public byte Opcode
{
get
{
return this.eventHeader->Opcode;
}
}
/// <summary>
/// Gets the task of the event.
/// </summary>
public ushort Task
{
get
{
return this.eventHeader->Task;
}
}
/// <summary>
/// Gets the keyword of the event.
/// </summary>
public ulong Keyword
{
get
{
return this.eventHeader->Keyword;
}
}
/// <summary>
/// Gets the elapsed execution time for kernel-mode instructions, in CPU time units.
/// </summary>
public int KernelTime
{
get
{
return this.eventHeader->KernelTime;
}
}
/// <summary>
/// Gets the elapsed execution time for user-mode instructions, in CPU time units.
/// </summary>
public int UserTime
{
get
{
return this.eventHeader->UserTime;
}
}
/// <summary>
/// Gets an identifier that relates two events. For details, see EventWriteTransfer.
/// </summary>
public Guid ActivityId
{
get
{
return this.eventHeader->ActivityId;
}
}
}
}
<file_sep>//-----------------------------------------------------------------------
// <copyright file="ConfigurationValidationException.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-----------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Configuration
{
using System;
/// <summary>
/// The type of validation being performed at the time of the failure.
/// </summary>
public enum ValidationType
{
ServerSide,
DuplicateMetricSource,
DuplicateDimension,
DuplicatePreaggregate,
DuplicateSamplingType,
BuiltInTypeRemoved,
}
/// <summary>
/// Exception thrown when configuration validation fails.
/// </summary>
public sealed class ConfigurationValidationException : Exception
{
/// <summary>
/// Initializes a new instance of the <see cref="ConfigurationValidationException"/> class.
/// </summary>
/// <param name="message">Message describing exception situation.</param>
/// <param name="validationType">The type of the validation that failed.</param>
public ConfigurationValidationException(string message, ValidationType validationType)
: base(message)
{
this.ValidationType = validationType;
}
/// <summary>
/// Initializes a new instance of the <see cref="ConfigurationValidationException"/> class.
/// </summary>
/// <param name="message">Message describing exception situation.</param>
/// <param name="validationType">The type of the validation that failed.</param>
/// <param name="innerException">Inner exception which caused exception situation.</param>
public ConfigurationValidationException(string message, ValidationType validationType, Exception innerException)
: base(message, innerException)
{
this.ValidationType = validationType;
}
/// <summary>
/// The type of the validation which failed.
/// </summary>
public ValidationType ValidationType { get; }
}
}
<file_sep># Stamp Locator
Date: 2/11/2019
## Status
## Context
Third party monitoring accounts use a different tenancy model, i.e., the same account exists on all azmon regional stamps.
Therefore, to query metrics, partner team(s) need to use our client API to determine the target regional stamp first for a given region.
There is a hard-coded region-to-MDM-stamp mapping in StampLocator in the client library,
so partner team(s) need to take a new client library NuGet package whenever support for a new region is added.
## Decision
We will create a new *public* API in FE to return this mapping. To improve the reliability:
1) We will have a new DNS name *stamplocator.metrics.nsatc.net* which includes 3 stamps in 3 stamp groups in 3 different continents.
The 3 stamps are azglobal, azmonsuk, and azmonejp in priority/tier order.
2) StampLocator.CreateInstanceAsync asks for a folder on the local disk to cache this mapping in case no MDM stamp is reachable.
The folder can be provisioned as an Azure local resource so that it can survive machine reimaging.
If no MDM stamp is reachable and no local cache is available, StampLocator.CreateInstanceAsync will throw;
otherwise it will succeed and auto-refresh will happen in the background hourly.
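For illustration, a minimal usage sketch follows (the cache folder, subscription id, and region below are placeholder assumptions, not values defined by this ADR):
```csharp
// Illustrative only: folder, subscription id, and region are placeholders.
IStampLocator locator = await StampLocator.CreateInstanceAsync(
    @"D:\LocalResources\StampLocatorCache",
    (activity, error) => Console.WriteLine($"{activity}: {error ?? "success"}"));

// Resolve the regional stamp for a 3rd party account before issuing metric queries.
Uri stampEndpoint = locator.GetStampEndpoint("<subscription-guid>", "westus2");
```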
We expose the following activities for partner team(s) to add monitoring and logging.
```csharp
/// <summary>
/// Creates an instance of <see cref="IStampLocator"/> asynchronously.
/// </summary>
/// <param name="folderToCacheRegionStampMap">The folder to cache region stamp map.</param>
/// <param name="activityReporter">The activity reporter. The string argument contains the error detail when the activity results in an error; otherwise it is null.</param>
/// <returns>An instance of <see cref="StampLocator"/>.</returns>
public static Task<IStampLocator> CreateInstanceAsync(string folderToCacheRegionStampMap, Action<StampLocatorActivity, string> activityReporter)
/// <summary>
/// The stamp locator activities.
/// </summary>
public enum StampLocatorActivity
{
/// <summary>
/// Refreshing the region stamp map from the MDM backend API.
/// </summary>
StartToRefrehRegionStampMap,
FinishedRefreshingRegionStampMap,
FailedToRefrehRegionStampMap,
/// <summary>
/// Loading the region stamp map from the local file regionStampMap.json.
/// </summary>
StartToLoadRegionStampMapFromLocalFile,
FinishedLoadingRegionStampMapFromLocalFile,
FailedToLoadRegionStampMapFromLocalFile,
/// <summary>
/// Writing the region stamp map to the local file regionStampMap.json.
/// </summary>
StartToWriteRegionStampMapToLocalFile,
FinishedWritingRegionStampMapToLocalFile,
FailedToWriteRegionStampMapToLocalFile,
}
```<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="Logger.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Logging
{
using System;
using System.Linq;
/// <summary>
/// Static type that provides a "globally" available logging mechanism.
/// </summary>
/// <remarks>
/// Implemented as a static type so that it does not need to be passed to every
/// single type performing logging. The actual engine doing the logging is
/// defined via an interface that should be set on this static type as early as
/// possible in the lifetime of the application.
/// </remarks>
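/// <example>
/// A minimal usage sketch (the engine instance and level below are illustrative, not prescriptive):
/// <code>
/// // Configure logging once, as early as possible in the application lifetime.
/// Logger.SetLogEngine(new ConsoleLogEngine());
/// Logger.SetMaxLogLevel(LoggerLevel.Info);
/// </code>
/// </example>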
public static class Logger
{
private static ILogEngine[] logEngines = { new ConsoleLogEngine(), EventSourceLogEngine.Logger };
private static LoggerLevel maxLogLevel = LoggerLevel.Info;
/// <summary>
/// Gets or sets a value indicating whether to disable logging.
/// </summary>
public static bool DisableLogging { get; set; }
/// <summary>
/// Sets the log engines to be used for logging. This method is not
/// thread safe, the typical scenario is for it to be called once at the
/// start of the program and not anymore. Warning: when the log engine is changed
/// any custom log id created via the CreateCustomLogId method should be considered
/// invalidated and as such objects need to be re-created.
/// </summary>
/// <param name="logEngines">
/// Log engines to be used to produce the logs.
/// </param>
public static void SetLogEngine(params ILogEngine[] logEngines)
{
if (logEngines == null)
{
throw new ArgumentNullException("logEngines");
}
Logger.logEngines = logEngines;
}
/// <summary>
/// Sets the maximum log level; anything above this level is not logged by the engine.
/// </summary>
/// <param name="level">
/// Statements at or below this level are logged by the engine; anything above it is not.
/// </param>
public static void SetMaxLogLevel(LoggerLevel level)
{
maxLogLevel = level;
}
/// <summary>
/// Checks if a log statement with the given parameters will be actually logged or
/// not. Useful to avoid expensive operations for log statements that are going to
/// be dropped by the log engine.
/// </summary>
/// <param name="level">
/// Level of the log statement.
/// </param>
/// <param name="logId">
/// Log identification for classifying log statements.
/// </param>
/// <param name="tag">
/// Extra string that allows another level of classification under the log id.
/// </param>
/// <returns>
/// True if the statement is going to be logged, false otherwise.
/// </returns>
internal static bool IsLogged(LoggerLevel level, object logId, string tag)
{
return (level <= maxLogLevel) && logEngines.Any(e => e.IsLogged(level, logId, tag));
}
/// <summary>
/// Gets the maximum log level; anything above this level is not logged by the engine.
/// </summary>
/// <returns>
/// The maximum log level current in use by the logger.
/// </returns>
internal static LoggerLevel GetMaxLogLevel()
{
return maxLogLevel;
}
/// <summary>
/// Creates a custom object used by the logger to help identifying a boundary of
/// logging (e.g.: component, object, service, etc). Warning: these objects are
/// specific for each implementation of a log engine. If the log engine is changed,
/// via SetLogEngine, all previously created such objects need to be re-created for
/// correct usage with the current log engine.
/// </summary>
/// <param name="logIdName">
/// Friendly name to be associated with the log id.
/// </param>
/// <returns>
/// The object to be used as a log id - these objects are specific to each implementation
/// of a log engine. If the log engine is changed, via SetLogEngine, all previously
/// created such objects need to be re-created for correct usage with the current
/// log engine.
/// </returns>
internal static object CreateCustomLogId(string logIdName)
{
if (string.IsNullOrEmpty(logIdName))
{
throw new ArgumentNullException("logIdName");
}
return logIdName;
}
/// <summary>
/// Logs the given data according to the log engine previously set.
/// </summary>
/// <param name="level">
/// Level of the log statement.
/// </param>
/// <param name="logId">
/// Log identification for classifying log statements and also any object
/// that a specific log engine may need to perform its logging. All such
/// objects created before a call to SetLogEngine with a different log engine
/// should be re-created with the log engine currently in use.
/// </param>
/// <param name="tag">
/// Extra string that allows another level of classification under the log id.
/// </param>
/// <param name="format">
/// Message to be logged, it can be a format message.
/// </param>
/// <param name="objectParams">
/// Optional, any parameter to be used to build the formatted message string.
/// </param>
internal static void Log(
LoggerLevel level, object logId, string tag, string format, params object[] objectParams)
{
// Do only the cheap log level checking here, the other checking is left to the
// log engine.
if (level <= maxLogLevel && !DisableLogging)
{
foreach (var engine in logEngines)
{
engine.Log(level, logId, tag, format, objectParams);
}
}
}
}
}
<file_sep>// <copyright file="IStampLocator.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
namespace Microsoft.Cloud.Metrics.Client.ThirdParty
{
using System;
using System.Collections.Generic;
/// <summary>
/// Interface for locating the stamp endpoint for Azure external customers' 3rd party accounts.
/// </summary>
public interface IStampLocator
{
/// <summary>
/// Gets the stamp endpoint.
/// </summary>
/// <param name="subscriptionId">The subscription identifier.</param>
/// <param name="azureRegion">The azure region.</param>
/// <returns>The stamp endpoint for the given account identified by <paramref name="subscriptionId"/>.</returns>
Uri GetStampEndpoint(string subscriptionId, string azureRegion);
/// <summary>
/// Gets the stamp name.
/// </summary>
/// <param name="subscriptionId">The subscription identifier.</param>
/// <param name="azureRegion">The azure region.</param>
/// <returns>The stamp name for the given account identified by <paramref name="subscriptionId"/>.</returns>
string GetStampName(string subscriptionId, string azureRegion);
}
}<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="PreAggregateConfiguration.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Online.Metrics.Serialization.Configuration
{
using System;
using System.Collections.Generic;
using System.Linq;
using JetBrains.Annotations;
using Newtonsoft.Json;
/// <summary>
/// Represents configuration for a pre-aggregate of a metric.
/// </summary>
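/// <example>
/// An illustrative construction (the display name and dimension names are placeholders):
/// <code>
/// var preAggregate = new PreAggregateConfiguration(
///     "Datacenter and Role",
///     new[] { "Datacenter", "Role" },
///     minMaxMetricsEnabled: true,
///     percentileMetricsEnabled: false);
/// </code>
/// </example>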
public sealed class PreAggregateConfiguration : IEquatable<PreAggregateConfiguration>
{
private static readonly IEqualityComparer<ICollection<string>> DimensionsEqualityComparer =
new CollectionEqualityComparer<string>(StringComparer.OrdinalIgnoreCase);
private static readonly IEqualityComparer<ICollection<string>> DistinctCountColumnsEqualityComparer =
new CollectionEqualityComparer<string>(StringComparer.OrdinalIgnoreCase);
/// <summary>
/// Initializes a new instance of the <see cref="PreAggregateConfiguration"/> class.
/// </summary>
/// <param name="displayName">The display name.</param>
/// <param name="dimensions">The list of dimension names for the pre-aggregate.</param>
/// <param name="minMaxMetricsEnabled">Flag indicating whether min/max sampling types are enabled.</param>
/// <param name="percentileMetricsEnabled">Flag indicating whether percentile sampling type is enabled.</param>
/// <param name="distinctCountColumns">The list of dimension names for the distinct count.</param>
[JsonConstructor]
public PreAggregateConfiguration(string displayName, IEnumerable<string> dimensions, bool minMaxMetricsEnabled, bool percentileMetricsEnabled, IEnumerable<string> distinctCountColumns = null)
{
if (string.IsNullOrEmpty(displayName))
{
throw new ArgumentException("displayName is null or empty.");
}
if (dimensions == null)
{
throw new ArgumentNullException("dimensions");
}
this.DisplayName = displayName;
this.Dimensions = dimensions.ToList();
this.MinMaxMetricsEnabled = minMaxMetricsEnabled;
this.PercentileMetricsEnabled = percentileMetricsEnabled;
this.DistinctCountColumns = distinctCountColumns != null ? distinctCountColumns.ToList() : null;
}
/// <summary>
/// Gets the display name.
/// </summary>
public string DisplayName { get; private set; }
/// <summary>
/// Gets the collection of dimensions which this pre-aggregate represents.
/// </summary>
public ICollection<string> Dimensions { get; private set; }
/// <summary>
/// Gets the collection of distinct count columns.
/// </summary>
public ICollection<string> DistinctCountColumns { get; private set; }
/// <summary>
/// Gets a value indicating whether min and max should be generated for the pre-aggregate.
/// </summary>
[JsonProperty(DefaultValueHandling = DefaultValueHandling.Ignore)]
public bool MinMaxMetricsEnabled { get; private set; }
/// <summary>
/// Gets a value indicating whether percentile should be generated for the pre-aggregate.
/// </summary>
[JsonProperty(DefaultValueHandling = DefaultValueHandling.Ignore)]
public bool PercentileMetricsEnabled { get; private set; }
/// <summary>Implements the equality operator.</summary>
/// <param name="left">The left operand.</param>
/// <param name="right">The right operand.</param>
/// <returns>True if the two configurations are equal; false otherwise.</returns>
public static bool operator ==([CanBeNull] PreAggregateConfiguration left, [CanBeNull] PreAggregateConfiguration right)
{
return Equals(left, right);
}
/// <summary>Implements the inequality operator.</summary>
/// <param name="left">The left operand.</param>
/// <param name="right">The right operand.</param>
/// <returns>True if the two configurations are not equal; false otherwise.</returns>
public static bool operator !=([CanBeNull] PreAggregateConfiguration left, [CanBeNull] PreAggregateConfiguration right)
{
return !Equals(left, right);
}
/// <summary>
/// Indicates whether the current object is equal to another object of the same type.
/// </summary>
/// <param name="other">An object to compare with this object.</param>
/// <returns>
/// true if the current object is equal to the <paramref name="other" /> parameter; otherwise, false.
/// </returns>
public bool Equals(PreAggregateConfiguration other)
{
if (ReferenceEquals(null, other))
{
return false;
}
if (ReferenceEquals(this, other))
{
return true;
}
return string.Equals(this.DisplayName, other.DisplayName, StringComparison.OrdinalIgnoreCase)
&& DimensionsEqualityComparer.Equals(this.Dimensions, other.Dimensions)
&& DistinctCountColumnsEqualityComparer.Equals(this.DistinctCountColumns, other.DistinctCountColumns)
&& this.MinMaxMetricsEnabled == other.MinMaxMetricsEnabled
&& this.PercentileMetricsEnabled == other.PercentileMetricsEnabled;
}
/// <summary>
/// Determines whether the specified <see cref="object" />, is equal to this instance.
/// </summary>
/// <param name="obj">The <see cref="object" /> to compare with this instance.</param>
/// <returns>
/// <c>true</c> if the specified <see cref="object" /> is equal to this instance; otherwise, <c>false</c>.
/// </returns>
public override bool Equals(object obj)
{
if (ReferenceEquals(null, obj))
{
return false;
}
if (ReferenceEquals(this, obj))
{
return true;
}
var castObj = obj as PreAggregateConfiguration;
return castObj != null
&& this.Equals(castObj);
}
/// <inheritdoc />
public override int GetHashCode()
{
unchecked
{
// ReSharper disable NonReadonlyMemberInGetHashCode Justification = "All properties have a private setter used only for Json Serialization, it is effectively readonly"
var hashCode = this.DisplayName != null
? StringComparer.OrdinalIgnoreCase.GetHashCode(this.DisplayName)
: 0;
hashCode = (hashCode * 397) ^ DimensionsEqualityComparer.GetHashCode(this.Dimensions);
hashCode = (hashCode * 397)
^ DistinctCountColumnsEqualityComparer.GetHashCode(this.DistinctCountColumns);
hashCode = (hashCode * 397) ^ this.MinMaxMetricsEnabled.GetHashCode();
hashCode = (hashCode * 397) ^ this.PercentileMetricsEnabled.GetHashCode();
// ReSharper restore NonReadonlyMemberInGetHashCode
return hashCode;
}
}
/// <summary>
/// Compares two collections for equality by using the contained items' <see cref="IEquatable{T}" /> methods
/// </summary>
/// <typeparam name="T">Object type.</typeparam>
private sealed class CollectionEqualityComparer<T> : IEqualityComparer<ICollection<T>>
{
private readonly IEqualityComparer<T> itemComparer;
/// <summary>
/// Initializes a new instance of the <see cref="CollectionEqualityComparer{T}"/> class.
/// </summary>
/// <param name="itemComparer">The item comparer.</param>
internal CollectionEqualityComparer([NotNull] IEqualityComparer<T> itemComparer)
{
this.itemComparer = itemComparer;
}
/// <inheritdoc />
bool IEqualityComparer<ICollection<T>>.Equals([CanBeNull] ICollection<T> x, [CanBeNull] ICollection<T> y)
{
if (ReferenceEquals(x, y))
{
return true;
}
if (ReferenceEquals(x, null) || ReferenceEquals(y, null))
{
return false;
}
if (x.Count != y.Count)
{
return false;
}
int xNullCount;
int yNullCount;
IDictionary<T, int> xCountsDictionary = this.CreateCountDictionary(x, out xNullCount);
IDictionary<T, int> yCountsDictionary = this.CreateCountDictionary(y, out yNullCount);
if (xNullCount != yNullCount)
{
return false;
}
foreach (var kvp in xCountsDictionary)
{
int yValue;
if (!yCountsDictionary.TryGetValue(kvp.Key, out yValue))
{
return false;
}
if (kvp.Value != yValue)
{
return false;
}
}
return true;
}
/// <inheritdoc />
public int GetHashCode([CanBeNull] ICollection<T> obj)
{
if (obj == null)
{
return 0;
}
unchecked
{
int hashCode = 0;
foreach (var item in obj)
{
hashCode += ReferenceEquals(item, null) ? -1 : this.itemComparer.GetHashCode(item);
}
return hashCode;
}
}
[NotNull]
private IDictionary<T, int> CreateCountDictionary([NotNull] ICollection<T> collection, out int nullCount)
{
// CODESYNC: Args passed to .GroupBy and .ToDictionary affect the construction of countDictionaryComparer
Dictionary<T, int> countsDictionary = new Dictionary<T, int>(this.itemComparer);
nullCount = 0;
foreach (var item in collection)
{
if (ReferenceEquals(item, null))
{
nullCount++;
continue;
}
if (!countsDictionary.ContainsKey(item))
{
countsDictionary[item] = 1;
}
else
{
countsDictionary[item]++;
}
}
return countsDictionary;
}
}
}
}<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="IConfigurationUpdateResult.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Configuration
{
/// <summary>
/// Interface to represent the configuration update result.
/// </summary>
public interface IConfigurationUpdateResult
{
/// <summary>
/// Gets the monitoring account on which the configuration was updated.
/// </summary>
string MonitoringAccount { get; }
/// <summary>
/// Gets or sets a value indicating whether the configuration was updated successfully.
/// </summary>
bool Success { get; set; }
/// <summary>
/// Exception details in case of failures.
/// </summary>
string Message { get; set; }
}
}
<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="BitBinaryReaderV2.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Online.Metrics.Serialization.BitHelper
{
using System.IO;
using System.Runtime.CompilerServices;
/// <summary>
/// The class which allows reading bits from stream one by one.
/// </summary>
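/// <example>
/// A minimal sketch (the byte value is illustrative):
/// <code>
/// using (var stream = new MemoryStream(new byte[] { 0xA0 }))
/// using (var binaryReader = new BinaryReader(stream))
/// {
///     var bitReader = new BitBinaryReaderV2(binaryReader);
///     long value = bitReader.ReadBits(3); // reads the top bits 101 => 5
/// }
/// </code>
/// </example>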
public sealed class BitBinaryReaderV2
{
private readonly BinaryReader reader;
private int currentBit;
private byte currentByte;
/// <summary>
/// Initializes a new instance of the <see cref="BitBinaryReaderV2"/> class.
/// </summary>
/// <param name="reader">The reader.</param>
public BitBinaryReaderV2(BinaryReader reader)
{
this.reader = reader;
this.currentBit = 128;
}
/// <summary>
/// Gets the index of the current bit from which data is read.
/// </summary>
public byte CurrentBitIndex { get; private set; }
/// <summary>
/// Gets the <see cref="BinaryReader"/>.
/// </summary>
public BinaryReader BinaryReader => this.reader;
/// <summary>
/// Reads bits from the stream.
/// </summary>
/// <param name="numBits">The number of bits.</param>
/// <returns>
/// The bits that were read, packed into the least significant bits of the result.
/// </returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public long ReadBits(int numBits)
{
long result = 0;
int i;
// First complete the current byte read
for (i = numBits - 1; i >= 0 && this.CurrentBitIndex != 0; --i)
{
if (this.ReadBit())
{
result |= 1L << i;
}
}
// Now read byte by byte
for (; i >= 7; i -= 8)
{
result |= ((long)this.GetCurrentByte()) << (i - 7);
}
// Now read the left bits
for (; i >= 0; --i)
{
if (this.ReadBit())
{
result |= 1L << i;
}
}
return result;
}
/// <summary>
/// Reads one bit from the stream.
/// </summary>
/// <returns>Read bit.</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public bool ReadBit()
{
var result = (this.GetCurrentByte() & this.currentBit) != 0;
if (this.currentBit == 1)
{
this.currentBit = 128;
this.CurrentBitIndex = 0;
}
else
{
this.currentBit >>= 1;
++this.CurrentBitIndex;
}
return result;
}
/// <summary>
/// Reads uint value stored in Base-128 encoding.
/// </summary>
/// <returns>Read value.</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public uint ReadUInt32FromBase128()
{
return (uint)this.ReadUInt64FromBase128();
}
/// <summary>
/// Reads ulong value stored in Base-128 encoding.
/// </summary>
/// <returns>Read value.</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public ulong ReadUInt64FromBase128()
{
ulong val = 0;
var shift = 0;
byte b;
do
{
b = (byte)this.ReadBits(8);
val = val + ((ulong)(b & 0x7f) << shift);
shift += 7;
}
while ((b & 0x80) != 0);
return val;
}
/// <summary>
/// Reads till end of byte boundary is reached.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void ReadTillEndOfByteBoundary()
{
while (this.currentBit != 128)
{
this.ReadBit();
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private byte GetCurrentByte()
{
if (this.currentBit == 128)
{
this.currentByte = this.reader.ReadByte();
}
return this.currentByte;
}
}
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="IDiagnosticHeartbeat.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.MetricsExtension
{
using System;
/// <summary>
/// The interface representing the diagnostic heartbeat event emitted by the MetricsExtension over ETW.
/// </summary>
public interface IDiagnosticHeartbeat
{
/// <summary>
/// Gets the name of the MetricsExtension instance this heartbeat was emitted from.
/// </summary>
string InstanceName { get; }
/// <summary>
/// Gets the uptime in seconds.
/// </summary>
int UptimeInSec { get; }
/// <summary>
/// Gets the count of ETW events that were successfully received by the extension but could not be processed and were dropped.
/// </summary>
int EtwEventsDroppedCount { get; }
/// <summary>
/// Gets the count of ETW events that were lost prior to being received by the extension.
/// </summary>
int EtwEventsLostCount { get; }
/// <summary>
/// Gets the count of aggregated metrics that were dropped prior to publication.
/// </summary>
int AggregatedMetricsDroppedCount { get; }
/// <summary>
/// Gets a value indicating whether this instance is nearing the maximum ETW processing queue size.
/// </summary>
bool IsNearingEtwQueueLimit { get; }
/// <summary>
/// Gets a value indicating whether this instance is nearing the maximum aggregation queue limit for any of the publishers.
/// </summary>
bool IsNearingAggregationQueueLimit { get; }
}
}
<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="ClientAssemblyMigration.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Utility
{
using System;
/// <summary>
/// Represents a single conversion from the serialized assembly to the deserialization assembly.
/// </summary>
internal class ClientAssemblyMigration
{
/// <summary>
/// Initializes a new instance of the <see cref="ClientAssemblyMigration" /> class.
/// </summary>
/// <param name="fromAssembly">From assembly.</param>
/// <param name="fromType">From type.</param>
/// <param name="toType">To type.</param>
public ClientAssemblyMigration(string fromAssembly, string fromType, Type toType)
{
this.FromAssembly = fromAssembly;
this.FromType = fromType;
this.ToType = toType;
}
/// <summary>
/// Original assembly.
/// </summary>
public string FromAssembly { get; }
/// <summary>
/// Original type.
/// </summary>
public string FromType { get; }
/// <summary>
/// Type to bind to.
/// </summary>
public Type ToType { get; }
}
}
<file_sep>// -----------------------------------------------------------------------
// <copyright file="DiagnosticHeartbeat.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// -----------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.MetricsExtension
{
using Metrics.Etw;
/// <summary>
/// A class that represents the extension diagnostic heartbeat.
/// </summary>
internal sealed class DiagnosticHeartbeat : IDiagnosticHeartbeat
{
/// <summary>
/// Gets the count of aggregated metrics that were dropped prior to publication.
/// </summary>
public int AggregatedMetricsDroppedCount { get; private set; }
/// <summary>
/// Gets the count of ETW events that were successfully received by the extension but could not be processed and were dropped.
/// </summary>
public int EtwEventsDroppedCount { get; private set; }
/// <summary>
/// Gets the count of ETW events that were lost prior to being received by the extension.
/// </summary>
public int EtwEventsLostCount { get; private set; }
/// <summary>
/// Gets the name of the MetricsExtension instance this heartbeat was emitted from.
/// </summary>
public string InstanceName { get; private set; }
/// <summary>
/// Gets a value indicating whether this instance is nearing the maximum aggregation queue limit for any of the publishers.
/// </summary>
public bool IsNearingAggregationQueueLimit { get; private set; }
/// <summary>
/// Gets a value indicating whether this instance is nearing the maximum ETW processing queue size.
/// </summary>
public bool IsNearingEtwQueueLimit { get; private set; }
/// <summary>
/// Gets the uptime in seconds.
/// </summary>
public int UptimeInSec { get; private set; }
/// <summary>
/// Creates a diagnostic heartbeat from the ETW event.
/// </summary>
/// <param name="etwMetricData">The etw metric data.</param>
/// <returns>The diagnostic heartbeat.</returns>
public static unsafe IDiagnosticHeartbeat FromEtwEvent(NativeMethods.EventRecord* etwMetricData)
{
var heartbeat = new DiagnosticHeartbeat();
var pointerInPayload = etwMetricData->UserData;
heartbeat.InstanceName = EtwPayloadManipulationUtils.ReadString(ref pointerInPayload);
var uptimeSec = *((int*)pointerInPayload);
pointerInPayload = EtwPayloadManipulationUtils.Shift(pointerInPayload, sizeof(int));
heartbeat.UptimeInSec = uptimeSec;
var etwEventsDropped = *((int*)pointerInPayload);
pointerInPayload = EtwPayloadManipulationUtils.Shift(pointerInPayload, sizeof(int));
heartbeat.EtwEventsDroppedCount = etwEventsDropped;
var etwEventsLost = *((int*)pointerInPayload);
pointerInPayload = EtwPayloadManipulationUtils.Shift(pointerInPayload, sizeof(int));
heartbeat.EtwEventsLostCount = etwEventsLost;
var aggregatedMetricsDropped = *((int*)pointerInPayload);
pointerInPayload = EtwPayloadManipulationUtils.Shift(pointerInPayload, sizeof(int));
heartbeat.AggregatedMetricsDroppedCount = aggregatedMetricsDropped;
var isNearingEtwLimit = *((byte*)pointerInPayload);
pointerInPayload = EtwPayloadManipulationUtils.Shift(pointerInPayload, sizeof(byte));
heartbeat.IsNearingEtwQueueLimit = isNearingEtwLimit != 0;
var isNearingAggregationLimit = *((byte*)pointerInPayload);
heartbeat.IsNearingAggregationQueueLimit = isNearingAggregationLimit != 0;
return heartbeat;
}
}
}
<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="FileOperationHelper.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Utility
{
using System;
using System.IO;
using System.Threading.Tasks;
using Microsoft.Cloud.Metrics.Client.Logging;
/// <summary>
/// Helper class for file I/O
/// </summary>
internal static class FileOperationHelper
{
private static readonly object LogId = Logger.CreateCustomLogId("FileOperationHelper");
/// <summary>
/// Creates the folder if it does not already exist.
/// </summary>
/// <param name="folder">The folder path.</param>
/// <returns>True if the folder already exists or was successfully created; false otherwise.</returns>
internal static bool CreateFolderIfNotExists(string folder)
{
if (!Directory.Exists(folder))
{
try
{
Directory.CreateDirectory(folder);
}
catch (Exception e)
{
Logger.Log(LoggerLevel.Error, LogId, "CreateFolderIfNotExists", $"Fail to create folder {folder}. {e}");
return false;
}
}
return true;
}
/// <summary>
/// Saves the content to a file on disk.
/// </summary>
/// <param name="path">The file path to save to.</param>
/// <param name="content">The content to be saved.</param>
internal static void SaveContentToFile(string path, string content)
{
try
{
File.WriteAllText(path, content);
}
catch (Exception e)
{
Logger.Log(LoggerLevel.Error, LogId, "SaveConfig", $"Fail to save file {path}. {e}");
throw;
}
}
/// <summary>
/// Asynchronously saves the content to a file on disk.
/// </summary>
/// <param name="path">The file path to save to.</param>
/// <param name="content">The content to be saved.</param>
/// <returns>A task representing the asynchronous save operation.</returns>
internal static async Task SaveContentToFileAsync(string path, string content)
{
try
{
using (StreamWriter writer = new StreamWriter(path, false))
{
await writer.WriteAsync(content).ConfigureAwait(false);
}
}
catch (Exception e)
{
Logger.Log(LoggerLevel.Error, LogId, "SaveContentToFileAsync", $"Fail to save file {path}. {e}");
throw;
}
}
}
}<file_sep>// <copyright file="IPoolTrackable.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
namespace Metrics.Services.Common
{
/// <summary>
/// The state of the poolable object
/// </summary>
public enum PoolObjectTrackingInfo : byte
{
/// <summary>
/// The object does not support allocation/release tracking
/// </summary>
TrackingNotSupported = 0,
/// <summary>
/// The object is currently owned by the pool and was not allocated to the user code
/// </summary>
InPool = 1,
/// <summary>
/// The object is currently allocated from the pool and is owned by the user code
/// </summary>
Allocated = 2,
/// <summary>
/// The object was instantiated but the value was not set.
/// TODO: this should have the value of 0 (and TrackingNotSupported should be 3) when unit tests that return the object to pool without
/// allocating it from pool are fixed - this way we can catch the NotSet value during release for cases of objects being
/// returned to pool without being allocated from the pool
/// </summary>
NotSet = 3,
}
/// <summary>
/// All poolable objects must implement this interface to
/// enable object tracking by pool code
/// </summary>
public interface IPoolTrackable
{
/// <summary>
/// Gets or sets the tracking state that indicates whether this object is currently owned by a pool.
/// This property is set by object pool code.
/// Initial value of the property is PoolObjectTrackingInfo.TrackingNotSupported
/// </summary>
PoolObjectTrackingInfo PoolObjectTrackingInfo { get; set; }
}
}<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="PercentileConfiguration.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Configuration
{
using Newtonsoft.Json;
/// <summary>
/// Determines whether the percentile sampling types are available for this preaggregate.
/// </summary>
public sealed class PercentileConfiguration : IPercentileConfiguration
{
/// <summary>
/// PercentileConfiguration where percentile sampling types are enabled.
/// </summary>
public static readonly PercentileConfiguration PercentileEnabled = new PercentileConfiguration(true);
/// <summary>
/// PercentileConfiguration where percentile sampling types are disabled.
/// </summary>
public static readonly PercentileConfiguration PercentileDisabled = new PercentileConfiguration(false);
/// <summary>
/// Initializes a new instance of the <see cref="PercentileConfiguration"/> class.
/// </summary>
/// <param name="enabled">Whether or not the feature is enabled.</param>
[JsonConstructor]
internal PercentileConfiguration(bool enabled)
{
this.Enabled = enabled;
}
/// <summary>
/// Gets a value indicating whether the percentile sampling types are enabled.
/// </summary>
public bool Enabled { get; }
}
}
<file_sep>// -----------------------------------------------------------------------
// <copyright file="AssemblyInfo.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// -----------------------------------------------------------------------
using System.Reflection;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
[assembly: System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Design", "CA1016:MarkAssembliesWithAssemblyVersion", Justification = "Assembly Version not required.")]
<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="IExpression.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Configuration
{
/// <summary>
/// An expression that can be evaluated for computed sampling types or composite metrics.
/// </summary>
public interface IExpression
{
/// <summary>
/// Gets or sets the name of the expression.
/// </summary>
string Name { get; set; }
/// <summary>
/// Gets or sets the expression.
/// </summary>
string Expression { get; set; }
}
}
<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="QueryResult.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Query
{
using System.Collections.Generic;
using System.Text;
using Newtonsoft.Json;
/// <summary>
/// Represents a single time series that was selected and is being returned as a query result.
/// </summary>
internal sealed class QueryResult : IQueryResult
{
/// <summary>
/// Initializes a new instance of the <see cref="QueryResult"/> class.
/// Create a single query result
/// </summary>
/// <param name="dimensionList">The list of dimensions.</param>
/// <param name="evaluatedResult">The evaluated result.</param>
/// <param name="seriesValues">The time series values, only included if specified in the query.</param>
[JsonConstructor]
internal QueryResult(
KeyValuePair<string, string>[] dimensionList,
double? evaluatedResult,
double?[] seriesValues)
{
this.DimensionList = dimensionList;
this.EvaluatedResult = evaluatedResult;
this.TimeSeries = seriesValues;
}
/// <summary>
/// Gets the set of dimension name-value pairs that meet the query condition.
/// </summary>
public IReadOnlyList<KeyValuePair<string, string>> DimensionList { get; private set; }
/// <summary>
/// Gets the evaluated value for this time series that meets the condition set in the query (provided for evidence and/or sorting).
/// </summary>
public double? EvaluatedResult { get; private set; }
/// <summary>
/// Gets the full collection of time series values for the query interval. It should be null if
/// the query did not request the full collection of values to be returned.
/// </summary>
public double?[] TimeSeries { get; private set; }
/// <summary>
/// Returns a <see cref="string" /> that represents this instance.
/// </summary>
/// <returns>
/// A <see cref="string" /> that represents this instance.
/// </returns>
public override string ToString()
{
var sb = new StringBuilder();
sb.AppendLine(string.Format("Result: {0}", this.EvaluatedResult));
sb.Append("Dimensions:");
foreach (var pair in this.DimensionList)
{
sb.Append(string.Format("{0}: {1};", pair.Key, pair.Value));
}
sb.AppendLine();
if (this.TimeSeries != null && this.TimeSeries.Length > 0)
{
sb.Append("[");
sb.Append(string.Join(", ", this.TimeSeries));
sb.AppendLine("]");
}
return sb.ToString();
}
}
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="IFilteredTimeSeriesQueryResponse.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Query
{
using System;
using System.Collections.Generic;
/// <summary>
/// An interface for filtered time series query response.
/// </summary>
public interface IFilteredTimeSeriesQueryResponse
{
/// <summary>
/// Gets the query request.
/// </summary>
FilteredTimeSeriesQueryRequest QueryRequest { get; }
/// <summary>
/// Gets the start time in UTC for the query results.
/// </summary>
DateTime StartTimeUtc { get; }
/// <summary>
/// Gets the end time in UTC for the query results.
/// </summary>
DateTime EndTimeUtc { get; }
/// <summary>
/// Gets the time resolution in milliseconds for the query results.
/// </summary>
int TimeResolutionInMinutes { get; }
/// <summary>
/// Gets the <see cref="FilteredTimeSeries"/> list. Each item represents a single time series where start time, end time and time resolution
/// are represented by the members of this object.
/// </summary>
IReadOnlyList<IFilteredTimeSeries> FilteredTimeSeriesList { get; }
/// <summary>
/// Gets the error code.
/// </summary>
FilteredTimeSeriesQueryResponseErrorCode ErrorCode { get; }
/// <summary>
/// Gets the diagnostics information.
/// </summary>
DiagnosticInfo DiagnosticInfo { get; }
}
}<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="SerializationConstants.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Online.Metrics.Serialization
{
/// <summary>
/// Serialization constants.
/// </summary>
public static class SerializationConstants
{
/// <summary>
/// The default series resolution in minutes.
/// </summary>
public const int DefaultSeriesResolutionInMinutes = 1;
/// <summary>
/// The HTTP header indicating whether to skip scaling the metric data.
/// </summary>
public const string ScalingFactorDisabledHeader = "__ScalingFactorDisabled__";
/// <summary>
/// The MIME type for octet streams.
/// </summary>
public const string OctetStreamContentType = "application/octet-stream";
/// <summary>
/// The trace identifier header.
/// </summary>
public const string TraceIdHeader = "TraceGuid";
/// <summary>
/// The client identifier header.
/// </summary>
public const string ClientIdHeader = "ClientId";
/// <summary>
/// The maximum metric description length allowed.
/// </summary>
public const int MaximumMetricDescriptionLength = 1024;
/// <summary>
/// The maximum description length allowed.
/// </summary>
public const int MaximumDescriptionLength = 256;
}
}
<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="BitBinaryWriter.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Online.Metrics.Serialization.BitHelper
{
using System.IO;
using System.Runtime.CompilerServices;
/// <summary>
/// A class which allows writing bits to the stream.
/// We accumulate current byte value bit by bit and when it is full, it is written to the stream.
/// <see cref="Flush" /> method must be called at the end since the last byte to write to stream could be partial.
/// </summary>
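/// <example>
/// A minimal sketch (the values are illustrative); note the mandatory <see cref="Flush"/> call:
/// <code>
/// using (var stream = new MemoryStream())
/// using (var binaryWriter = new BinaryWriter(stream))
/// {
///     var bitWriter = new BitBinaryWriter(binaryWriter);
///     bitWriter.WriteBit(true);      // writes bit 1
///     bitWriter.WriteBits(5, 3, 0);  // writes bits 101
///     bitWriter.Flush();             // flushes the partial byte 1101_0000
/// }
/// </code>
/// </example>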
public sealed class BitBinaryWriter
{
private const int HighestBitInByte = 1 << 7;
private readonly BinaryWriter writer;
private int currentBit;
/// <summary>
/// Initializes a new instance of the <see cref="BitBinaryWriter"/> class.
/// </summary>
/// <param name="writer">The writer.</param>
public BitBinaryWriter(BinaryWriter writer)
{
this.writer = writer;
this.currentBit = HighestBitInByte;
this.CurrentByte = 0;
}
/// <summary>
/// Gets the value of the currently accumulated byte.
/// </summary>
public byte CurrentByte { get; private set; }
/// <summary>
/// Gets the underlying binary writer.
/// </summary>
public BinaryWriter BinaryWriter => this.writer;
/// <summary>
/// Write bits to the stream.
/// </summary>
/// <param name="value">The value.</param>
/// <param name="numBits">The number bits.</param>
/// <param name="positionOfLeastSignificantBit">The position of least significant bit, starting with 0.</param>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void WriteBits(long value, int numBits, int positionOfLeastSignificantBit)
{
for (int i = numBits; i > 0; --i)
{
long mask = 1L << (i - 1 + positionOfLeastSignificantBit);
bool bit = (value & mask) != 0;
this.CurrentByte = (byte)(this.CurrentByte | (bit ? this.currentBit : 0));
this.currentBit >>= 1;
if (this.currentBit == 0)
{
this.writer.Write(this.CurrentByte);
this.CurrentByte = 0;
this.currentBit = HighestBitInByte;
}
}
}
/// <summary>
/// Write bit to the stream.
/// </summary>
/// <param name="bit">Bit value to write, where True means 1 and False means 0.</param>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void WriteBit(bool bit)
{
this.CurrentByte = (byte)(this.CurrentByte | (bit ? this.currentBit : 0));
this.currentBit >>= 1;
if (this.currentBit == 0)
{
this.writer.Write(this.CurrentByte);
this.CurrentByte = 0;
this.currentBit = HighestBitInByte;
}
}
/// <summary>
/// Flush the current byte into stream even if it is partial.
/// </summary>
public void Flush()
{
if (this.currentBit != HighestBitInByte)
{
this.writer.Write(this.CurrentByte);
this.CurrentByte = 0;
this.currentBit = HighestBitInByte;
}
}
}
}
<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="IPreaggregation.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Configuration
{
using System.Collections.Generic;
/// <summary>
/// A grouping of dimensions used to aggregate metric data.
/// </summary>
public interface IPreaggregation
{
/// <summary>
/// Gets or sets the name of the preaggregate.
/// </summary>
string Name { get; set; }
/// <summary>
/// Gets the dimensions of the preaggregate in sorted order.
/// </summary>
IEnumerable<string> Dimensions { get; }
/// <summary>
/// The min/max sampling type configuration.
/// </summary>
IMinMaxConfiguration MinMaxConfiguration { get; set; }
/// <summary>
/// The percentile sampling type configuration.
/// </summary>
IPercentileConfiguration PercentileConfiguration { get; set; }
/// <summary>
/// The data rollup configuration.
/// </summary>
IRollupConfiguration RollupConfiguration { get; set; }
/// <summary>
/// The metric data store configuration.
/// </summary>
IPublicationConfiguration PublicationConfiguration { get; set; }
/// <summary>
/// The distinct count sampling type configuration.
/// </summary>
IDistinctCountConfiguration DistinctCountConfiguration { get; set; }
/// <summary>
/// The filtering configuration.
/// </summary>
IFilteringConfiguration FilteringConfiguration { get; set; }
/// <summary>
/// Adds the dimension to the preaggregate.
/// </summary>
/// <param name="dimensionName">Name of the dimension.</param>
void AddDimension(string dimensionName);
/// <summary>
/// Removes the dimension from the preaggregate.
/// </summary>
/// <param name="dimensionName">Name of the dimension.</param>
void RemoveDimension(string dimensionName);
}
}
<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="ICompositeMetricConfiguration.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Configuration
{
using System.Collections.Generic;
/// <summary>
/// Represents a composite metric in the MDM System.
/// </summary>
public interface ICompositeMetricConfiguration : IMetricConfiguration
{
/// <summary>
/// Gets the metric sources.
/// </summary>
IEnumerable<CompositeMetricSource> MetricSources { get; }
/// <summary>
/// Gets the composite expressions.
/// </summary>
IEnumerable<CompositeExpression> CompositeExpressions { get; }
/// <summary>
/// Gets or sets a value indicating whether to treat missing series as zeroes.
/// </summary>
bool TreatMissingSeriesAsZeroes { get; set; }
/// <summary>
/// Adds the metric source.
/// </summary>
/// <param name="metricSource">The metric source.</param>
void AddMetricSource(CompositeMetricSource metricSource);
/// <summary>
/// Removes the metric source.
/// </summary>
/// <param name="metricSourceName">The metric source name.</param>
void RemoveMetricSource(string metricSourceName);
/// <summary>
/// Adds the expression.
/// </summary>
/// <param name="expression">The expression.</param>
void AddExpression(CompositeExpression expression);
/// <summary>
/// Removes the expression.
/// </summary>
/// <param name="expressionName">The expression name.</param>
void RemoveExpression(string expressionName);
}
}<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="MetricConfigurationV2.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Online.Metrics.Serialization.Configuration
{
using System;
using System.Collections.Generic;
using System.ComponentModel;
using Newtonsoft.Json;
/// <summary>
/// The configuration used by MetricsExtension. It will only contain the necessary
/// fields needed by the client.
/// </summary>
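/// <example>
/// An illustrative construction (the metric name is a placeholder):
/// <code>
/// var config = new MetricConfigurationV2(
///     "RequestLatencyMs",
///     scalingFactor: null,
///     samplingTypes: MetricConfigurationV2.DefaultSamplingTypes);
/// </code>
/// </example>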
public struct MetricConfigurationV2 : IEquatable<MetricConfigurationV2>
{
/// <summary>
/// The name of the metric used for pumping data for <see cref="SamplingTypes.Count"/>
/// in the new (Monitoring Agent) pipeline.
/// </summary>
public const string CountMetricName = "Count";
/// <summary>
/// The name of the metric used for pumping data for <see cref="SamplingTypes.Sum"/>
/// in the new (Monitoring Agent) pipeline.
/// </summary>
/// <remarks>
/// This is the metric that contains the configuration indicating whether <see cref="SamplingTypes.Max"/>,
/// <see cref="SamplingTypes.Min"/>, <see cref="Histogram"/>, etc. are enabled.
/// </remarks>
public const string SumMetricName = "Sum";
/// <summary>
/// The default sampling types to be used when the configuration is not obtained from the server.
/// </summary>
public const SamplingTypes DefaultSamplingTypes = SamplingTypes.Count | SamplingTypes.Sum;
/// <summary>
/// Gets or sets the list of pre-aggregations that have distinct count columns configured.
/// </summary>
[JsonProperty("pc")]
public List<PreAggregateConfiguration> PreAggregationsWithDistinctCountColumns;
/// <summary>
/// The map of "special" strings in metric names to default scaling factors to be used when scaling factor is not configured for metrics.
/// </summary>
/// <remarks>
/// The values for scaling factor in this list must be greater than or equal to zero.
/// </remarks>
private static readonly List<KeyValuePair<string, float>> DefaultScalingFactorForMetrics =
new List<KeyValuePair<string, float>> { new KeyValuePair<string, float>("%", 100f), };
/// <summary>
/// Initializes a new instance of the <see cref="MetricConfigurationV2"/> struct.
/// </summary>
/// <param name="metricName">Name of the metric.</param>
/// <param name="scalingFactor">The scaling factor.</param>
/// <param name="samplingTypes">The sampling types.</param>
public MetricConfigurationV2(string metricName, float? scalingFactor, SamplingTypes samplingTypes)
: this()
{
this.MetricName = metricName;
this.ScalingFactor = scalingFactor;
this.SamplingTypes = samplingTypes;
this.EnableClientSideForking = true;
this.EnableClientSidePublication = true;
this.EnableClientSideEtwPublication = true;
this.IngestionOptions = 0;
this.HyperLogLogBValue = HyperLogLogSketch.DefaultBValue;
}
/// <summary>
/// Gets or sets the version of this instance.
/// </summary>
[JsonProperty(DefaultValueHandling = DefaultValueHandling.Ignore)]
public uint Version { get; set; }
/// <summary>
/// Gets or sets the Monitoring Account to which this metric is reported.
/// </summary>
[JsonProperty(PropertyName = "ma", DefaultValueHandling = DefaultValueHandling.Ignore)]
public string MonitoringAccount { get; set; }
/// <summary>
/// Gets or sets the metric namespace.
/// </summary>
[JsonProperty(PropertyName = "ns", DefaultValueHandling = DefaultValueHandling.Ignore)]
public string MetricNamespace { get; set; }
/// <summary>
/// Gets or sets the metric name.
/// </summary>
[JsonProperty(PropertyName = "m")]
public string MetricName { get; set; }
/// <summary>
/// Gets or sets the sampling types.
/// </summary>
[JsonProperty(PropertyName = "s", DefaultValueHandling = DefaultValueHandling.IgnoreAndPopulate)]
[DefaultValue(DefaultSamplingTypes)]
public SamplingTypes SamplingTypes { get; set; }
/// <summary>
/// Gets or sets the scaling factor to be used.
/// </summary>
/// <remarks>
/// Clients should pass un-scaled values when this is null.
/// </remarks>
[JsonProperty(PropertyName = "sf", DefaultValueHandling = DefaultValueHandling.Ignore)]
public float? ScalingFactor { get; set; }
/// <summary>
/// Gets or sets the HyperLogLog b-value used for distinct count estimation.
/// </summary>
/// <remarks>
/// Each HyperLogLog counter uses a small, fixed amount of space but can estimate
/// the cardinality of any set of up to around a billion values with relative error
/// of 1.04 / Math.sqrt(2 ** b) with high probability.
/// </remarks>
[JsonProperty("bv")]
public int HyperLogLogBValue { get; set; }
/// <summary>
/// Gets or sets the list of dimension configurations.
/// </summary>
[JsonProperty(DefaultValueHandling = DefaultValueHandling.Ignore)]
public IList<DimensionConfiguration> DimensionConfigurations { get; set; }
/// <summary>
/// Gets or sets the list of pre-aggregates which this metric will be aggregated with.
/// </summary>
/// <remarks>
/// We have hidden the configuration APIs.
/// If we ever need to expose the configuration APIs, we need to revisit the types of properties.
/// For this particular property, we can expose a strong-typed class named PreAggregateConfigurationList that can check for duplicate entries,
/// or we can even make the whole configuration object read only.
/// </remarks>
[JsonProperty(DefaultValueHandling = DefaultValueHandling.Ignore)]
public List<PreAggregateConfiguration> PreAggregations { get; set; }
/// <summary>
/// Gets or sets the computed sampling types.
/// In the case of <see cref="IsCompositeMetric"/>, this field is used to save the expressions and their names.
/// </summary>
[JsonProperty(DefaultValueHandling = DefaultValueHandling.Ignore)]
public IList<ComputedSamplingTypeConfiguration> ComputedSamplingTypes { get; set; }
/// <summary>
/// Gets or sets the list of the variable names of metrics to the metric identifiers.
/// The keys can be used in the <see cref="ComputedSamplingTypeConfiguration.Expression"/> field of the <see cref="ComputedSamplingTypes"/>.
/// </summary>
/// <remarks>
/// Used only for composite metrics.
/// Composite metric cannot use <see cref="MetricIdentifier"/> of another composite metric as of now, so the UI should show hints appropriately.
/// </remarks>
[JsonProperty(DefaultValueHandling = DefaultValueHandling.Ignore)]
public Dictionary<string, MetricIdentifier> MetricIdentifiers { get; set; }
/// <summary>
/// Gets a value indicating whether the metric configuration is for a composite metric (i.e. one that is composed of multiple metrics).
/// </summary>
[JsonIgnore]
public bool IsCompositeMetric
{
get { return this.MetricIdentifiers != null && this.MetricIdentifiers.Count > 0; }
}
/// <summary>
/// Gets or sets whether this event should be published by the metrics extension to any of the account defined endpoints.
/// </summary>
/// <remarks>
/// If false, publication will not occur to the FE, but it will still be produced as a local aggregate on the client machine for Cosmos upload.
/// </remarks>
[JsonProperty("pe", DefaultValueHandling = DefaultValueHandling.IgnoreAndPopulate)]
[DefaultValue(true)]
public bool EnableClientSidePublication { get; set; }
/// <summary>
/// Gets or sets whether this event should be published by the metrics extension to any of the external account endpoints.
/// </summary>
/// <remarks>
/// If false, publication will still occur to the MDM primary endpoint but not to any of the external/forking endpoints.
/// </remarks>
[JsonProperty("fe", DefaultValueHandling = DefaultValueHandling.IgnoreAndPopulate)]
[DefaultValue(true)]
public bool EnableClientSideForking { get; set; }
/// <summary>
/// Gets or sets whether this event should be published to the aggregated ETW provider.
/// </summary>
[JsonProperty("epe", DefaultValueHandling = DefaultValueHandling.IgnoreAndPopulate)]
[DefaultValue(true)]
public bool EnableClientSideEtwPublication { get; set; }
/// <summary>
/// Gets or sets ingestions options to be used for this metric.
/// </summary>
/// <remarks>
/// The constructor initializes this value to 0.
/// </remarks>
[JsonProperty("igo", DefaultValueHandling = DefaultValueHandling.IgnoreAndPopulate)]
[DefaultValue((short)0)]
public short IngestionOptions { get; set; }
/// <summary>
/// Gets or sets whether metrics in composite metrics should be considered as zero when metric is not reported or is missing.
/// </summary>
/// <remarks>
/// Only used for Composite metrics.
/// </remarks>
[JsonProperty("tz", DefaultValueHandling = DefaultValueHandling.IgnoreAndPopulate)]
[DefaultValue(false)]
public bool TreatMissingSeriesAsZeros { get; set; }
/// <summary>
/// Gets the default scaling factor if <paramref name="metric" /> contains scaling indicators.
/// </summary>
/// <param name="metricNamespace">The metric namespace.</param>
/// <param name="metric">The metric name.</param>
/// <returns>
/// The default scaling factor.
/// </returns>
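/// <example>
/// Illustrative values: a metric name containing "%" maps to the default factor of 100, while other names yield null.
/// <code>
/// float? a = MetricConfigurationV2.GetDefaultScalingFactor("MyNamespace", "Cpu Usage %");  // 100f
/// float? b = MetricConfigurationV2.GetDefaultScalingFactor("MyNamespace", "RequestCount"); // null
/// </code>
/// </example>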
public static float? GetDefaultScalingFactor(string metricNamespace, string metric)
{
float? defaultScalingFactor = null;
foreach (var entry in DefaultScalingFactorForMetrics)
{
if (metric.Contains(entry.Key))
{
defaultScalingFactor = entry.Value;
}
}
return defaultScalingFactor;
}
/// <summary>
/// Determines whether an instance is equal to this instance.
/// </summary>
/// <param name="other">Instance to compare to this instance.</param>
/// <returns>
/// true if the specified objects are equal; otherwise, false.
/// </returns>
public bool Equals(MetricConfigurationV2 other)
{
if (other.MetricName == null)
{
if (this.MetricName == null)
{
return true;
}
return false;
}
if (this.MetricName == null)
{
return false;
}
return StringComparer.OrdinalIgnoreCase.Equals(this.MetricName, other.MetricName);
}
/// <summary>
/// Gets hashcode.
/// </summary>
/// <returns>Hash code.</returns>
public override int GetHashCode()
{
return StringComparer.OrdinalIgnoreCase.GetHashCode(this.MetricName);
}
/// <summary>
/// Overrides Object.Equals.
/// </summary>
/// <param name="obj">Object to compare with.</param>
/// <returns>True if equals, false otherwise.</returns>
public override bool Equals(object obj)
{
if (!(obj is MetricConfigurationV2))
{
return false;
}
return this.Equals((MetricConfigurationV2)obj);
}
}
}
<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="BitAggregateMagic.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Online.Metrics.Serialization.BitHelper
{
using System.Runtime.CompilerServices;
/// <summary>
/// The Aggregate Magic Algorithms adapted from @ http://aggregate.org/MAGIC/
/// </summary>
public static class BitAggregateMagic
{
/// <summary>
/// The number of bits in long integer.
/// </summary>
public const byte NumBitsInLongInteger = 64;
/// <summary>
/// Counts the number of set bits in <paramref name="x"/>.
/// </summary>
/// <param name="x">The target for which to count the number of set bits.</param>
/// <returns>The number of set bits in <paramref name="x"/>.</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static int CountOneBits(ulong x)
{
x = (x & 0x5555555555555555) + ((x >> 1) & 0x5555555555555555);
x = (x & 0x3333333333333333) + ((x >> 2) & 0x3333333333333333);
x = (x & 0x0f0f0f0f0f0f0f0f) + ((x >> 4) & 0x0f0f0f0f0f0f0f0f);
x = (x & 0x00ff00ff00ff00ff) + ((x >> 8) & 0x00ff00ff00ff00ff);
x = (x & 0x0000ffff0000ffff) + ((x >> 16) & 0x0000ffff0000ffff);
x = (x & 0x00000000ffffffff) + ((x >> 32) & 0x00000000ffffffff);
return (int)x;
}
/// <summary>
/// Counts the number of leading zeros.
/// </summary>
/// <param name="x">The target for which to count the number of leading zeros.</param>
/// <returns>The number of leading zeros.</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static int CountLeadingZeros(long x)
{
if (x < 0)
{
return 0;
}
x |= x >> 1;
x |= x >> 2;
x |= x >> 4;
x |= x >> 8;
x |= x >> 16;
x |= x >> 32;
return NumBitsInLongInteger - CountOneBits((ulong)x);
}
/// <summary>
/// Counts the number of trailing zeros.
/// </summary>
/// <param name="x">The target for which to count the number of trailing zeros.</param>
/// <returns>The number of trailing zeros.</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static int CountTrailingZeros(long x)
{
return CountOneBits((ulong)(x & -x) - 1);
}
/// <summary>
/// Given a binary integer value x, the next largest power of 2 can be computed by a SWAR
/// algorithm that recursively "folds" the upper bits into the lower bits.
/// This process yields a bit vector with the same most significant 1 as x, but all 1's below it.
/// Adding 1 to that value yields the next largest power of 2.
/// </summary>
/// <param name="x">The value for which the next largest power of 2 is needed</param>
/// <returns>The next largest power of 2 for x which is strictly more than x
/// Note that for x = 32 the result is 64, not 32.</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static int NextLargestPowerOf2(int x)
{
x |= x >> 1;
x |= x >> 2;
x |= x >> 4;
x |= x >> 8;
x |= x >> 16;
return x + 1;
}
/// <summary>
/// Checks whether x is a power of two.
/// </summary>
/// <param name="x">The number to check.</param>
/// <returns>true if x is a power of 2, false otherwise. (Note that this method also returns true for 0.)</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static bool IsPowerOfTwo(ulong x)
{
return (x & (x - 1)) == 0;
}
/// <summary>
/// Changes unsigned int endian encoding.
/// Can be used both ways: BE->LE and LE->BE.
/// </summary>
/// <param name="x">Value to change.</param>
/// <returns>Value with changed endian encoding.</returns>
public static uint ChangeEndianness(uint x)
{
// swap adjacent 16-bit blocks
x = (x >> 16) | (x << 16);
// swap adjacent 8-bit blocks
return ((x & 0xFF00FF00) >> 8) | ((x & 0x00FF00FF) << 8);
}
/// <summary>
/// Changes unsigned long endian encoding.
/// Can be used both ways: BE->LE and LE->BE.
/// </summary>
/// <param name="x">Value to change.</param>
/// <returns>Value with changed endian encoding.</returns>
public static ulong ChangeEndianness(ulong x)
{
// swap adjacent 32-bit blocks
x = (x >> 32) | (x << 32);
// swap adjacent 16-bit blocks
x = ((x & 0xFFFF0000FFFF0000) >> 16) | ((x & 0x0000FFFF0000FFFF) << 16);
// swap adjacent 8-bit blocks
return ((x & 0xFF00FF00FF00FF00) >> 8) | ((x & 0x00FF00FF00FF00FF) << 8);
}
}
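// Illustrative usage sketch (not part of the original library): exercises the bit-twiddling helpers
// above on a few sample inputs. The class and method names below are hypothetical.
internal static class BitAggregateMagicUsageExample
{
internal static void Demonstrate()
{
// 0b1011 has three set bits.
System.Diagnostics.Debug.Assert(BitAggregateMagic.CountOneBits(0b1011UL) == 3);
// 1L has 63 leading zeros in a 64-bit integer.
System.Diagnostics.Debug.Assert(BitAggregateMagic.CountLeadingZeros(1L) == 63);
// 8 = 0b1000 has three trailing zeros.
System.Diagnostics.Debug.Assert(BitAggregateMagic.CountTrailingZeros(8L) == 3);
// The next power of 2 strictly greater than 32 is 64.
System.Diagnostics.Debug.Assert(BitAggregateMagic.NextLargestPowerOf2(32) == 64);
// Byte-swapping 0x12345678 yields 0x78563412.
System.Diagnostics.Debug.Assert(BitAggregateMagic.ChangeEndianness(0x12345678u) == 0x78563412u);
}
}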
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="EtlBacklogConfig.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// <summary>
// Type that contains the information describing how backlog of ETL files should be processed.
// </summary>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Metrics.Etw
{
using System;
/// <summary>
/// Type that contains the information describing how backlog of ETL files should be processed.
/// </summary>
internal struct EtlBacklogConfig
{
/// <summary>
/// Initializes a new instance of the <see cref="EtlBacklogConfig"/> struct.
/// </summary>
/// <param name="shouldReceiveBacklogFiles">
/// A boolean indicating whether the subscriber should receive any backlog of ETL files.
/// </param>
/// <param name="backlogTargetStartTime">
/// The point in time from which the subscriber wants to receive backlog files.
/// </param>
/// <param name="maxBacklogFilesRequested">
/// The maximum number of backlog files that are going to be passed to the subscriber.
/// </param>
public EtlBacklogConfig(bool shouldReceiveBacklogFiles, DateTime backlogTargetStartTime, int maxBacklogFilesRequested)
: this()
{
this.ShouldReceiveBacklogFiles = shouldReceiveBacklogFiles;
this.TargetStartTimeUtc = backlogTargetStartTime;
this.MaxFiles = maxBacklogFilesRequested;
}
/// <summary>
/// Gets a value indicating whether the subscriber should receive any backlog of ETL files.
/// </summary>
public bool ShouldReceiveBacklogFiles { get; private set; }
/// <summary>
/// Gets the point in time from which the subscriber wants to receive backlog files.
/// </summary>
/// <remarks>
/// Since this point in time is likely contained in the middle of an ETL file and the
/// dispatchers do not drop events prior to this time on behalf of the subscriber, the
/// subscriber itself should be prepared to receive and ignore events prior to this point
/// in time.
/// </remarks>
/// <remarks>
/// If the number of backlog files to be processed from this point in time exceeds the
/// <see cref="MaxFiles"/> the latter has priority and only that number of backlog files
/// are going to be sent to the subscriber.
/// </remarks>
public DateTime TargetStartTimeUtc { get; private set; }
/// <summary>
/// Gets the maximum number of backlog files that are going to be passed to the subscriber.
/// This is one way to put some kind of upper bound on the amount of work triggered by backlog
/// files.
/// </summary>
/// <remarks>
/// Notice how this value relates to the <see cref="TargetStartTimeUtc"/>.
/// </remarks>
public int MaxFiles { get; private set; }
}
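// Illustrative sketch (not part of the original code): shows how a dispatcher might construct an
// EtlBacklogConfig for a subscriber that wants at most 10 backlog files starting from two hours ago.
// The class and method names below are hypothetical.
internal static class EtlBacklogConfigExample
{
internal static EtlBacklogConfig CreateTwoHourBacklogConfig()
{
// Request backlog processing from two hours ago, capped at 10 files; MaxFiles wins if they conflict.
return new EtlBacklogConfig(
shouldReceiveBacklogFiles: true,
backlogTargetStartTime: DateTime.UtcNow.AddHours(-2),
maxBacklogFilesRequested: 10);
}
}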
}<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="OperationStatus.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Configuration
{
/// <summary>
/// Operation result status Type.
/// </summary>
public enum OperationStatus
{
CompleteSuccess = 0,
ResourceNotFound = 1,
ConnectionError = 2,
FolderCreationError = 3,
FileSaveError = 4,
ResourceGetError = 5,
ResourcePostError = 6,
ResourceSkipped = 7,
FileCorrupted = 8
}
}<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="MetricEnrichmentRuleTransformationDefinition.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.MetricEnrichmentRuleManagement
{
using System.Collections.Generic;
/// <summary>
/// Represents a transformation definition to be applied to the metric event during metric data enrichment.
/// </summary>
public sealed class MetricEnrichmentRuleTransformationDefinition
{
/// <summary>
/// Initializes a new instance of the <see cref="MetricEnrichmentRuleTransformationDefinition"/> class.
/// </summary>
/// <param name="executorId">The executor id to be used for transformation. Example: IdMapping based Addition Executor.</param>
/// <param name="sourceEventDimensionNamesForKey">The dimension names from the source event whose values will be used for building the key for the lookup table in the external data service.</param>
/// <param name="executorConnectionStringOverride">The override for the executor connection string.</param>
/// <param name="transformationType">The type of transformation the rule represents.</param>
/// <param name="destinationColumnNamesForDimensions">
/// The map where Key represents destination column to be used to extract dimension values
/// and Value represents the final dimension to be used for the destination column value.
/// </param>
public MetricEnrichmentRuleTransformationDefinition(
string executorId,
List<string> sourceEventDimensionNamesForKey,
string executorConnectionStringOverride,
MetricEnrichmentTransformationType transformationType,
Dictionary<string, string> destinationColumnNamesForDimensions)
{
this.ExecutorId = executorId;
this.SourceEventDimensionNamesForKey = sourceEventDimensionNamesForKey;
this.ExecutorConnectionStringOverride = executorConnectionStringOverride;
this.TransformationType = transformationType;
this.DestinationColumnNamesForDimensions = destinationColumnNamesForDimensions;
}
/// <summary>
/// Gets the executor id to be used for transformation. Example: IdMapping based Addition Executor.
/// </summary>
public string ExecutorId { get; }
/// <summary>
/// Gets the dimension names whose values will be used for building the key for the lookup table in the external data service.
/// </summary>
public List<string> SourceEventDimensionNamesForKey { get; }
/// <summary>
/// Gets the override for the executor connection string.
/// </summary>
public string ExecutorConnectionStringOverride { get; }
/// <summary>
/// Gets the type of transformation the rule represents.
/// </summary>
public MetricEnrichmentTransformationType TransformationType { get; }
/// <summary>
/// Gets the map
/// where Key represents destination column to be used to extract dimension values
/// and Value represents the final dimension to be used for the destination column value.
/// </summary>
public Dictionary<string, string> DestinationColumnNamesForDimensions { get; }
/// <summary>
/// Validates the data is valid rule.
/// </summary>
/// <returns>
/// Validation failure message, empty means validation passed.
/// </returns>
internal string Validate()
{
if (string.IsNullOrEmpty(this.ExecutorId))
{
// TODO: Once executor's are finalized add executor id validation too.
return "Executor id cannot be null or empty.";
}
return string.Empty;
}
}
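// Illustrative sketch (not part of the original code): constructs a transformation definition with
// placeholder values. The class and method names, the executor id, the dimension names, and the use
// of default(MetricEnrichmentTransformationType) are all hypothetical and only for demonstration.
internal static class MetricEnrichmentRuleTransformationDefinitionExample
{
internal static MetricEnrichmentRuleTransformationDefinition CreateSampleDefinition()
{
return new MetricEnrichmentRuleTransformationDefinition(
executorId: "SampleExecutor",
sourceEventDimensionNamesForKey: new List<string> { "ResourceId" },
executorConnectionStringOverride: null,
transformationType: default(MetricEnrichmentTransformationType),
destinationColumnNamesForDimensions: new Dictionary<string, string> { { "Region", "CustomerRegion" } });
}
}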
}
<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="IComputedSamplingTypeExpression.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Configuration
{
/// <summary>
/// An expression that can be evaluated for computed sampling types or composite metrics.
/// </summary>
public interface IComputedSamplingTypeExpression : IExpression
{
/// <summary>
/// Gets a value indicating whether this instance is built in.
/// </summary>
bool IsBuiltIn { get; }
/// <summary>
/// Gets or sets the unit.
/// </summary>
string Unit { get; set; }
}
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="UserAccessTokenRefresher.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Utility
{
using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Cloud.Metrics.Client.Logging;
using Microsoft.IdentityModel.Clients.ActiveDirectory;
/// <summary>
/// A helper class to retrieve and refresh the user access token.
/// </summary>
public sealed class UserAccessTokenRefresher
{
/// <summary>
/// The bearer token authentication scheme.
/// </summary>
public const string BearerTokenAuthScheme = "Bearer";
private const string Authority = "https://login.microsoftonline.com/microsoft.onmicrosoft.com";
private const string TargetResource = "http://GenevaMDM/";
private const string ClientId = "8434f70a-4f5a-4dbe-8c83-c8ca8029ea22";
private static readonly object LogId = Logger.CreateCustomLogId("UserAccessTokenRefresher");
private static Lazy<UserAccessTokenRefresher> instance = new Lazy<UserAccessTokenRefresher>(() => new UserAccessTokenRefresher());
private readonly Uri clientRedirectUri = new Uri("http://GenevaMDMClient");
private readonly SemaphoreSlim accessTokenRefreshLock = new SemaphoreSlim(1);
private string userAccessToken;
private DateTime lastAccessTokenRefreshTime;
/// <summary>
/// Gets the instance.
/// </summary>
internal static UserAccessTokenRefresher Instance
{
get
{
return instance.Value;
}
}
/// <summary>
/// Gets the user access token.
/// </summary>
internal string UserAccessToken
{
get
{
if (this.userAccessToken == null)
{
this.RefreshAccessToken().Wait();
if (this.userAccessToken == null)
{
throw new MetricsClientException("Failed to obtain an AAD user access token.");
}
}
return this.userAccessToken;
}
}
/// <summary>
/// Refreshes the access token.
/// </summary>
/// <returns>A <see cref="Task"/> representing the execution.</returns>
internal async Task RefreshAccessToken()
{
await this.accessTokenRefreshLock.WaitAsync();
try
{
// 10 minutes is an arbitrary age; it ensures we request the AAD token only once when multiple HTTP clients try to renew the token at the same time.
if (this.lastAccessTokenRefreshTime < DateTime.UtcNow.AddMinutes(-10))
{
var authContext = new AuthenticationContext(Authority);
var result =
await authContext.AcquireTokenAsync(TargetResource, ClientId, this.clientRedirectUri, new PlatformParameters()).ConfigureAwait(false);
this.userAccessToken = result.AccessToken;
this.lastAccessTokenRefreshTime = DateTime.UtcNow;
Logger.Log(LoggerLevel.Info, LogId, "RefreshAccessToken", "Succeeded");
}
}
catch (Exception e)
{
Logger.Log(LoggerLevel.Error, LogId, "RefreshAccessToken", e.ToString());
throw;
}
finally
{
this.accessTokenRefreshLock.Release();
}
}
}
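// Illustrative sketch (not part of the original code): builds a Bearer authorization header from the
// lazily refreshed user access token. The class and method names below are hypothetical; the header
// type is fully qualified because System.Net.Http is not imported in this file.
internal static class UserAccessTokenRefresherExample
{
internal static System.Net.Http.Headers.AuthenticationHeaderValue CreateAuthorizationHeader()
{
// Accessing UserAccessToken acquires or refreshes the AAD token on first use.
string token = UserAccessTokenRefresher.Instance.UserAccessToken;
return new System.Net.Http.Headers.AuthenticationHeaderValue(UserAccessTokenRefresher.BearerTokenAuthScheme, token);
}
}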
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="StampLevelMetricEnrichmentRuleManager.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.MetricEnrichmentRuleManagement
{
using System;
using System.Collections.Generic;
using System.Net;
using System.Net.Http;
using System.Threading.Tasks;
using MetricEnrichmentRuleManagement;
using Microsoft.Cloud.Metrics.Client.Utility;
using Newtonsoft.Json;
/// <summary>
/// This class manages get and save operations on stamp-level metric enrichment rules (only service admins are authorized to modify stamp-level rules).
/// </summary>
public sealed class StampLevelMetricEnrichmentRuleManager
{
private readonly ConnectionInfo connectionInfo;
private readonly HttpClient httpClient;
private readonly string configurationUrlPrefix;
private readonly JsonSerializerSettings serializerSettings;
/// <summary>
/// Initializes a new instance of the <see cref="StampLevelMetricEnrichmentRuleManager"/> class.
/// </summary>
/// <param name="connectionInfo">The connection information for the MDM endpoint being used.</param>
public StampLevelMetricEnrichmentRuleManager(ConnectionInfo connectionInfo)
{
if (connectionInfo == null)
{
throw new ArgumentNullException(nameof(connectionInfo));
}
this.connectionInfo = connectionInfo;
this.configurationUrlPrefix = this.connectionInfo.GetAuthRelativeUrl("v1/config/enrichmentrules/");
this.httpClient = HttpClientHelper.CreateHttpClientWithAuthInfo(connectionInfo);
this.serializerSettings = new JsonSerializerSettings
{
TypeNameHandling = TypeNameHandling.Auto
};
}
/// <summary>
/// Gets all the enrichment rules configured for given monitoring account.
/// </summary>
/// <returns>All enrichment rules for the given monitoring account.</returns>
public async Task<IReadOnlyList<MetricEnrichmentRule>> GetAllAsync()
{
var path = $"{this.configurationUrlPrefix}getAll";
var uriBuilder = new UriBuilder(this.connectionInfo.GetEndpoint(string.Empty))
{
Path = path
};
var response = await HttpClientHelper.GetResponseAsStringAsync(
uriBuilder.Uri,
HttpMethod.Get,
this.httpClient,
string.Empty,
this.configurationUrlPrefix).ConfigureAwait(false);
var rules = JsonConvert.DeserializeObject<List<MetricEnrichmentRule>>(response, this.serializerSettings);
return rules;
}
/// <summary>
/// Save the metric configuration provided.
/// </summary>
/// <param name="rule">Rule to save.</param>
/// <returns>A task the caller can wait on.</returns>
public async Task SaveAsync(MetricEnrichmentRule rule)
{
if (rule == null)
{
throw new ArgumentNullException(nameof(rule));
}
var validationFailureMessage = rule.Validate();
if (!string.IsNullOrEmpty(validationFailureMessage))
{
throw new ArgumentException(validationFailureMessage);
}
if (!rule.MonitoringAccountFilter.Equals("*"))
{
throw new ArgumentException("Monitoring account filter needs to be '*' as this is a stamp-level rule.");
}
var path = $"{this.configurationUrlPrefix}";
var uriBuilder = new UriBuilder(this.connectionInfo.GetEndpoint(string.Empty))
{
Path = path,
Query = "apiVersion=1"
};
var serializedMetric = JsonConvert.SerializeObject(rule, Formatting.Indented, this.serializerSettings);
await HttpClientHelper.GetResponseAsStringAsync(
uriBuilder.Uri,
HttpMethod.Post,
this.httpClient,
string.Empty,
this.configurationUrlPrefix,
serializedContent: serializedMetric).ConfigureAwait(false);
}
/// <summary>
/// Deletes the enrichment rule.
/// </summary>
/// <param name="rule">Rule to be deleted.</param>
/// <returns>A task the caller can wait on.</returns>
public async Task DeleteAsync(MetricEnrichmentRule rule)
{
if (rule == null)
{
throw new ArgumentNullException(nameof(rule));
}
var validationFailureMessage = rule.Validate();
if (!string.IsNullOrEmpty(validationFailureMessage))
{
throw new ArgumentException(validationFailureMessage);
}
var path = $"{this.configurationUrlPrefix}";
var uriBuilder = new UriBuilder(this.connectionInfo.GetEndpoint(string.Empty))
{
Path = path
};
var serializedMetric = JsonConvert.SerializeObject(rule, Formatting.Indented, this.serializerSettings);
await HttpClientHelper.GetResponseAsStringAsync(
uriBuilder.Uri,
HttpMethod.Delete,
this.httpClient,
string.Empty,
this.configurationUrlPrefix,
serializedContent: serializedMetric).ConfigureAwait(false);
}
}
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="Histogram.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//
// <author email="selavrin">
// <NAME>
// </author>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Online.Metrics.Serialization
{
using System;
using System.Collections.Generic;
using System.Linq;
using global::Metrics.Services.Common;
/// <summary>
/// Represents a histogram.
/// </summary>
public sealed class Histogram : IReadOnlyHistogram, IPoolTrackable
{
private readonly List<KeyValuePair<ulong, uint>> histogram = new List<KeyValuePair<ulong, uint>>();
private uint count;
/// <inheritdoc />
PoolObjectTrackingInfo IPoolTrackable.PoolObjectTrackingInfo { get; set; }
/// <summary>
/// Gets the number of samples in the histogram.
/// </summary>
public int SamplesCount => this.histogram.Count;
/// <summary>
/// Gets the list of histogram samples: ordered pairs of value-count.
/// </summary>
public IEnumerable<KeyValuePair<ulong, uint>> Samples => this.histogram;
/// <summary>
/// Reinitializes histogram object with new data.
/// </summary>
/// <param name="histogramData">Unordered pairs of value-count from which histogram will be constructed.</param>
public void Initialize(IEnumerable<KeyValuePair<ulong, uint>> histogramData)
{
this.histogram.Clear();
this.histogram.AddRange(histogramData);
this.histogram.Sort((i1, i2) => i1.Key.CompareTo(i2.Key));
this.count = (uint)this.histogram.Sum(h => h.Value);
}
/// <summary>
/// Calculates percentile from the histogram.
/// </summary>
/// <param name="percent">Percent value for which to calculate percentile.</param>
/// <returns>Percentile value.</returns>
public float GetPercentile(float percent)
{
if (this.count == 0)
{
return 0;
}
if (percent < 0 || percent > 100)
{
throw new ArgumentOutOfRangeException(nameof(percent), "Percent should be within [0;100] range.");
}
// Find index of the first value, whose last entry index is higher than index of the percentile for given percent
float percentileIndex = percent * this.count / 100;
uint currentIndex = 0;
int index = -1;
for (int i = 0; i < this.histogram.Count; ++i)
{
currentIndex += this.histogram[i].Value;
if (percentileIndex <= currentIndex)
{
index = i;
break;
}
}
// Calculate percentile value based on found index
if (index == 0)
{
return this.histogram[index].Key;
}
// When percentile index lies between two row values in the original sorted array, use weighted average to calculate percentile value (same approach is used in PerfCollector)
var coefficient = percentileIndex - currentIndex + this.histogram[index].Value;
return coefficient < 1 ?
(this.histogram[index - 1].Key * (1 - coefficient)) + (this.histogram[index].Key * coefficient) : this.histogram[index].Key;
}
}
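// Illustrative usage sketch (not part of the original code): builds a small histogram and reads the
// median from it. The class and method names below are hypothetical.
public static class HistogramUsageExample
{
public static float ComputeMedian()
{
var histogram = new Histogram();
// Three samples of value 10, five of value 20, and two of value 100 (ten samples in total).
histogram.Initialize(new[]
{
new KeyValuePair<ulong, uint>(20, 5),
new KeyValuePair<ulong, uint>(10, 3),
new KeyValuePair<ulong, uint>(100, 2),
});
// The 50th percentile of these ten samples falls on the value 20.
return histogram.GetPercentile(50);
}
}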
}<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="QueryFilter.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Query
{
/// <summary>
/// Operator used to compare with the <see cref="QueryFilter.Operand"/>.
/// </summary>
public enum Operator
{
/// <summary>
/// Operator was not specified, query is not considered valid.
/// </summary>
Undefined,
/// <summary>
/// Operator equal.
/// </summary>
Equal,
/// <summary>
/// Operator not equal.
/// </summary>
NotEqual,
/// <summary>
/// Operator greater than.
/// </summary>
GreaterThan,
/// <summary>
/// Operator less than.
/// </summary>
LessThan,
/// <summary>
/// Operator less than or equal.
/// </summary>
LessThanOrEqual,
/// <summary>
/// Operator greater than or equal.
/// </summary>
GreaterThanOrEqual
}
/// <summary>
/// Type that represents the filter used in the query.
/// </summary>
public sealed class QueryFilter
{
/// <summary>
/// Filter object representing that no filtering should be done.
/// </summary>
public static readonly QueryFilter NoFilter = new QueryFilter(Operator.Undefined, 0.0);
/// <summary>
/// Initializes a new instance of the <see cref="QueryFilter"/> class.
/// Create a filter to be used in a filtered dimension query.
/// </summary>
/// <param name="operator">The operator.</param>
/// <param name="operand">The operand.</param>
public QueryFilter(Operator @operator, double operand)
{
this.Operator = @operator;
this.Operand = operand;
}
/// <summary>
/// Operator to use when comparing time series aggregate to <see cref="Operand"/>.
/// </summary>
public Operator Operator { get; private set; }
/// <summary>
/// The value to compare to.
/// </summary>
public double Operand { get; private set; }
/// <summary>
/// Returns a string representing the current values of the instance, helpful for debugging and logging.
/// </summary>
/// <returns>
/// A string representing the current values of the instance.
/// </returns>
public override string ToString()
{
return string.Format("{0} {1}", this.Operator, this.Operand);
}
/// <summary>
/// Determine if the provided object is equal to this <see cref="QueryFilter"/>.
/// </summary>
/// <param name="obj">The object.</param>
/// <returns>The result of the equality test.</returns>
public override bool Equals(object obj)
{
return this.Equals(obj as QueryFilter);
}
/// <summary>
/// Get hash code for this object.
/// </summary>
/// <returns>The hash code.</returns>
public override int GetHashCode()
{
return this.Operator.GetHashCode() ^ this.Operand.GetHashCode();
}
/// <summary>
/// Compare the provided <see cref="QueryFilter"/> to this one.
/// </summary>
/// <param name="otherFilter">The other query filter.</param>
/// <returns>The result of the equality test.</returns>
private bool Equals(QueryFilter otherFilter)
{
if (otherFilter == null)
{
return false;
}
return this.Operator.Equals(otherFilter.Operator)
&& this.Operand == otherFilter.Operand;
}
}
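// Illustrative sketch (not part of the original code): constructs a filter that keeps only time
// series whose aggregate is greater than 100, alongside the "no filtering" sentinel. The class and
// method names below are hypothetical.
public static class QueryFilterExample
{
public static QueryFilter CreateGreaterThan100Filter()
{
// Evaluates as "series aggregate > 100".
return new QueryFilter(Operator.GreaterThan, 100);
}
public static QueryFilter CreateUnfiltered()
{
// QueryFilter.NoFilter indicates that no filtering should be done.
return QueryFilter.NoFilter;
}
}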
}<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="ProductionGlobalEnvironmentResolver.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client
{
using Microsoft.Cloud.Metrics.Client.Logging;
using Newtonsoft.Json;
/// <summary>
/// A helper class to resolve the production global environment.
/// </summary>
/// <remarks>
/// The LX project consists of two distinct clouds. Neither will have connectivity to external networks and L3 hosted DNS names cannot be used.
/// This impacts the current behavior where users do not specify a specific endpoint and the client uses the fixed L3-hosted global stamp DNS name to locate the owning stamp for a given account.
/// The goal is to allow this behavior to continue without requiring customers to modify their code and configuration when moving into LX,
/// so this helper class tries to determine the production global environment by resolving the DNS names of a list of potential global environments and returning the first one that resolves successfully.
/// </remarks>
public class ProductionGlobalEnvironmentResolver
{
private static readonly object LogId = Logger.CreateCustomLogId("ProductionGlobalEnvironmentResolver");
private static readonly string[] PotentialProductionGlobalEnvironments =
{
"global.metrics.nsatc.net",
"global.metrics.trafficmanager.net",
};
private static string globalStampHostName;
/// <summary>
/// Gets the global stamp host name.
/// </summary>
/// <returns>The global stamp host name.</returns>
public static string ResolveGlobalStampHostName()
{
if (globalStampHostName != null)
{
return globalStampHostName;
}
for (int i = 0; i < PotentialProductionGlobalEnvironments.Length; i++)
{
var resolvedIp = ConnectionInfo.ResolveIp(PotentialProductionGlobalEnvironments[i], throwOnFailure: false).GetAwaiter().GetResult();
if (resolvedIp != null)
{
globalStampHostName = PotentialProductionGlobalEnvironments[i];
return PotentialProductionGlobalEnvironments[i];
}
Logger.Log(LoggerLevel.Error, LogId, "ProductionGlobalEnvironmentResolver", $"Failed to resolve {PotentialProductionGlobalEnvironments[i]}.");
}
string errorMsg = $"ProductionGlobalEnvironmentResolver - None of the host names can be resolved: {JsonConvert.SerializeObject(PotentialProductionGlobalEnvironments)}.";
Logger.Log(LoggerLevel.Error, LogId, "ProductionGlobalEnvironmentResolver", errorMsg);
throw new MetricsClientException(errorMsg);
}
}
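// Illustrative sketch (not part of the original code): resolves the production global stamp host name
// once and uses it to build a base URI. The class and method names below are hypothetical; System.Uri
// is fully qualified because System is not imported in this file.
internal static class ProductionGlobalEnvironmentResolverExample
{
internal static System.Uri BuildGlobalStampBaseUri()
{
// Throws MetricsClientException if none of the candidate host names can be resolved.
string hostName = ProductionGlobalEnvironmentResolver.ResolveGlobalStampHostName();
return new System.Uri($"https://{hostName}/");
}
}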
}
<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="BitBinaryWriterV2.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Online.Metrics.Serialization.BitHelper
{
using System;
using System.IO;
using System.Runtime.CompilerServices;
/// <summary>
/// A class which allows writing bits to the stream.
/// We accumulate current byte value bit by bit and when it is full, it is written to the stream.
/// <see cref="Flush" /> method must be called at the end since the last byte to write to stream could be partial.
/// </summary>
public sealed class BitBinaryWriterV2
{
private readonly BinaryWriter writer;
/// <summary>
/// Initializes a new instance of the <see cref="BitBinaryWriterV2"/> class.
/// </summary>
/// <param name="writer">The writer.</param>
public BitBinaryWriterV2(BinaryWriter writer)
{
this.writer = writer;
this.CurrentBitIndex = 0;
this.CurrentByte = 0;
}
/// <summary>
/// Gets the current bit position in the buffer.
/// </summary>
public byte CurrentBitIndex { get; private set; }
/// <summary>
/// Gets the value of the currently accumulated byte.
/// </summary>
public byte CurrentByte { get; private set; }
/// <summary>
/// Gets the underlying binary writer.
/// </summary>
public BinaryWriter BinaryWriter => this.writer;
/// <summary>
/// Write bits to the stream.
/// </summary>
/// <param name="oldValue">The value.</param>
/// <param name="numBits">The number bits.</param>
/// <param name="positionOfLeastSignificantBit">The position of least significant bit, starting with 0.</param>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public unsafe void WriteBits(long oldValue, int numBits, int positionOfLeastSignificantBit)
{
var l = &oldValue;
var value = *(ulong*)l;
int endBit = 64 - positionOfLeastSignificantBit;
int startBit = 64 - (numBits + positionOfLeastSignificantBit);
var bitsLeftToCopy = endBit - startBit;
value <<= startBit;
while (bitsLeftToCopy > 0)
{
var bitsToCopy = Math.Min(bitsLeftToCopy, 8 - this.CurrentBitIndex);
this.CurrentByte = (byte)(((ulong)this.CurrentByte << (bitsToCopy - 1)) | (value >> (64 - bitsToCopy)));
value <<= bitsToCopy;
bitsLeftToCopy -= bitsToCopy;
this.CurrentBitIndex += (byte)bitsToCopy;
if (this.CurrentBitIndex == 8)
{
this.writer.Write(this.CurrentByte);
this.CurrentByte = 0;
this.CurrentBitIndex = 0;
}
else
{
this.CurrentByte <<= 1;
}
}
}
/// <summary>
/// Write bit to the stream.
/// </summary>
/// <param name="bit">Bit value to write, where True means 1 and False means 0.</param>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void WriteBit(bool bit)
{
this.CurrentByte = (byte)(this.CurrentByte | (bit ? 1 : 0));
if (++this.CurrentBitIndex == 8)
{
this.writer.Write(this.CurrentByte);
this.CurrentByte = 0;
this.CurrentBitIndex = 0;
}
else
{
this.CurrentByte <<= 1;
}
}
/// <summary>
/// Writes uint value Base-128 encoded.
/// </summary>
/// <param name="value">Value to write.</param>
public void WriteUInt32AsBase128(uint value)
{
this.WriteUInt64AsBase128(value);
}
/// <summary>
/// Writes ulong value Base-128 encoded.
/// </summary>
/// <param name="value">Value to write.</param>
public void WriteUInt64AsBase128(ulong value)
{
var t = value;
do
{
var b = (byte)(t & 0x7f);
t >>= 7;
if (t > 0)
{
b |= 0x80;
}
this.WriteBits(b, 8, 0);
}
while (t > 0);
}
/// <summary>
/// Writes the current byte to the stream even if it is partial, and resets the writer to the next byte boundary.
/// </summary>
public void WriteTillEndOfByteBoundary()
{
if (this.CurrentBitIndex != 0)
{
var value = this.CurrentByte << (7 - this.CurrentBitIndex);
this.writer.Write((byte)value);
this.CurrentByte = 0;
this.CurrentBitIndex = 0;
}
}
/// <summary>
/// Flush the current byte into stream even if it is partial.
/// </summary>
public void Flush()
{
if (this.CurrentBitIndex != 0)
{
var value = this.CurrentByte << (7 - this.CurrentBitIndex);
this.writer.Write((byte)value);
}
}
}
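// Illustrative usage sketch (not part of the original code): writes a flag bit followed by a Base-128
// encoded value into an in-memory stream. The class and method names below are hypothetical.
public static class BitBinaryWriterV2UsageExample
{
public static byte[] WriteSample()
{
using (var stream = new MemoryStream())
using (var binaryWriter = new BinaryWriter(stream))
{
var bitWriter = new BitBinaryWriterV2(binaryWriter);
// One flag bit followed by a varint-style (Base-128) encoded value.
bitWriter.WriteBit(true);
bitWriter.WriteUInt64AsBase128(300);
// Flush is required because the last accumulated byte may be partial.
bitWriter.Flush();
binaryWriter.Flush();
return stream.ToArray();
}
}
}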
}
<file_sep>// <copyright file="SamplingTypesExtensionMethods.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
namespace Microsoft.Online.Metrics.Serialization
{
using System.Runtime.CompilerServices;
/// <summary>
/// The helper class to hold the extension methods for <see cref="SamplingTypes"/> type.
/// </summary>
public static class SamplingTypesExtensionMethods
{
/// <summary>
/// Returns true if the samplingTypes instance has value as one of the bits set.
/// </summary>
/// <param name="samplingTypes">The sampling types to check.</param>
/// <param name="value">The value to look for.</param>
/// <returns>True if value is present in samplingTypes, false otherwise.</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static bool Includes(this SamplingTypes samplingTypes, SamplingTypes value)
{
return (samplingTypes & value) == value;
}
}
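// Illustrative sketch (not part of the original code): demonstrates the flag-containment check using
// raw flag values cast to SamplingTypes, since the enum member names are not shown in this file.
// The class and method names below are hypothetical.
public static class SamplingTypesIncludesExample
{
public static bool DemonstrateIncludes()
{
// A value with the two lowest flag bits set includes each of those bits individually.
var combined = (SamplingTypes)0x3;
var singleFlag = (SamplingTypes)0x1;
return combined.Includes(singleFlag); // True, because (0x3 & 0x1) == 0x1.
}
}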
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="MonitoringAccountConfigurationManager.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Configuration
{
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Threading.Tasks;
using Microsoft.Cloud.Metrics.Client.Utility;
using Newtonsoft.Json;
/// <summary>
/// This class manages get and save operations on monitoring account configurations.
/// </summary>
public sealed class MonitoringAccountConfigurationManager : IMonitoringAccountConfigurationManager
{
private readonly ConnectionInfo connectionInfo;
private readonly HttpClient httpClient;
private readonly string monitoringAccountUrlPrefix;
private readonly string tenantUrlPrefix;
private readonly string operation;
private readonly JsonSerializerSettings serializerSettings;
/// <summary>
/// Initializes a new instance of the <see cref="MonitoringAccountConfigurationManager"/> class.
/// </summary>
/// <param name="connectionInfo">The connection information for the MDM endpoint being used.</param>
public MonitoringAccountConfigurationManager(ConnectionInfo connectionInfo)
{
if (connectionInfo == null)
{
throw new ArgumentNullException(nameof(connectionInfo));
}
this.connectionInfo = connectionInfo;
this.operation = this.connectionInfo.GetAuthRelativeUrl("v1/config");
this.monitoringAccountUrlPrefix = this.operation + "/monitoringAccount/";
this.tenantUrlPrefix = this.operation + "/tenant/";
this.httpClient = HttpClientHelper.CreateHttpClientWithAuthInfo(connectionInfo);
var migrations = new[]
{
new ClientAssemblyMigration(
"Metrics.Server",
"Microsoft.Online.Metrics.Server.Utilities.ConfigurationUpdateResult",
typeof(ConfigurationUpdateResult))
};
this.serializerSettings = new JsonSerializerSettings
{
TypeNameHandling = TypeNameHandling.Auto,
Binder = new ClientAssemblyMigrationSerializationBinder(migrations)
};
}
/// <summary>
/// Get the monitoring account specified by the monitoring account name.
/// </summary>
/// <param name="monitoringAccountName">The name of the monitoring account.</param>
/// <returns>The monitoring account.</returns>
public async Task<IMonitoringAccount> GetAsync(string monitoringAccountName)
{
if (string.IsNullOrWhiteSpace(monitoringAccountName))
{
throw new ArgumentException("Monitoring account must not be blank or null.");
}
var path = $"{this.monitoringAccountUrlPrefix}/{monitoringAccountName}";
// Call CheckIfGlobalEndpointWithRetry to avoid an exception thrown by unresolved global environments.
if (this.connectionInfo.CheckIfGlobalEndpointWithRetry())
{
// If the global endpoint is used, account information can be retrieved from the cache.
path += "/cache/true";
}
var uriBuilder = new UriBuilder(this.connectionInfo.GetEndpoint(monitoringAccountName))
{
Path = path,
Query = "version=1"
};
try
{
var response = await HttpClientHelper.GetResponse(
uriBuilder.Uri,
HttpMethod.Get,
this.httpClient,
monitoringAccountName,
this.operation).ConfigureAwait(false);
var settings = new JsonSerializerSettings { TypeNameHandling = TypeNameHandling.Auto };
return JsonConvert.DeserializeObject<MonitoringAccount>(
response.Item1,
settings);
}
catch (MetricsClientException mce)
{
if (mce.ResponseStatusCode.HasValue &&
mce.ResponseStatusCode.Value == HttpStatusCode.NotFound)
{
throw new AccountNotFoundException(
$"Account [{monitoringAccountName}] not found. TraceId: [{mce.TraceId}]", mce);
}
throw;
}
}
/// <summary>
/// Creates a monitoring account with provided configuration.
/// </summary>
/// <param name="monitoringAccount">The monitoring account configuration.</param>
/// <param name="stampHostName">The stamp name such as prod3.metrics.nsatc.net as documented @ https://aka.ms/mdm-endpoints.</param>
/// <returns>A task the caller can wait on.</returns>
public async Task CreateAsync(IMonitoringAccount monitoringAccount, string stampHostName)
{
if (string.IsNullOrWhiteSpace(stampHostName))
{
throw new ArgumentException("value is null or empty", nameof(stampHostName));
}
if (this.connectionInfo.Endpoint != null)
{
throw new ArgumentException("'Endpoint' must not be specified in the constructor for ConnectionInfo to create monitoring accounts.");
}
// Check if the client has service admin permission in the global stamp; if not, try to create the account in the target stamp directly.
var globalStampEndpoint = ConnectionInfo.ResolveGlobalEnvironments()[(int)this.connectionInfo.MdmEnvironment];
bool hasAccountCreationPermissionInGlobalStamp = await this.HasAccountCreationPermission(globalStampEndpoint).ConfigureAwait(false);
var endpoint = hasAccountCreationPermissionInGlobalStamp ? globalStampEndpoint : $"https://{stampHostName}/";
var url = $"{endpoint}{this.monitoringAccountUrlPrefix}{monitoringAccount.Name}/stamp/{stampHostName}";
await this.PostAsync(monitoringAccount, url).ConfigureAwait(false);
}
/// <summary>
/// Create a new monitoring account named <paramref name="newMonitoringAccountName"/> on stamp <paramref name="stampHostName"/> by copying the common settings from <paramref name="monitoringAccountToCopyFrom" />.
/// </summary>
/// <param name="newMonitoringAccountName">The new monitoring account name.</param>
/// <param name="monitoringAccountToCopyFrom">The name of the monitoring account where common settings are copied from.</param>
/// <param name="stampHostName">The stamp name such as prod3.metrics.nsatc.net as documented @ https://aka.ms/mdm-endpoints.</param>
/// <returns>A task the caller can wait on.</returns>
public async Task CreateAsync(string newMonitoringAccountName, string monitoringAccountToCopyFrom, string stampHostName)
{
if (string.IsNullOrWhiteSpace(newMonitoringAccountName))
{
throw new ArgumentException("value is null or empty", nameof(newMonitoringAccountName));
}
if (string.IsNullOrWhiteSpace(monitoringAccountToCopyFrom))
{
throw new ArgumentException("value is null or empty", nameof(monitoringAccountToCopyFrom));
}
if (string.IsNullOrWhiteSpace(stampHostName))
{
throw new ArgumentException("value is null or empty", nameof(stampHostName));
}
if (this.connectionInfo.Endpoint != null)
{
throw new ArgumentException("'Endpoint' must not be specified in the constructor for ConnectionInfo to create monitoring accounts.");
}
// Check if the client has service admin permission in the global stamp; if not, try to create the account in the target stamp directly.
var globalStampEndpoint = ConnectionInfo.ResolveGlobalEnvironments()[(int)this.connectionInfo.MdmEnvironment];
bool hasAccountCreationPermissionInGlobalStamp = await this.HasAccountCreationPermission(globalStampEndpoint).ConfigureAwait(false);
var endpoint = hasAccountCreationPermissionInGlobalStamp ? globalStampEndpoint : $"https://{stampHostName}/";
var url = $"{endpoint}{this.monitoringAccountUrlPrefix}{newMonitoringAccountName}/stamp/{stampHostName}/copy/{monitoringAccountToCopyFrom}";
try
{
await HttpClientHelper.GetResponse(
new Uri(url),
HttpMethod.Post,
this.httpClient,
newMonitoringAccountName,
this.operation).ConfigureAwait(false);
}
catch (MetricsClientException mce)
{
ThrowSpecificExceptionIfPossible(mce, newMonitoringAccountName);
throw;
}
}
/// <summary>
/// Save the monitoring account configuration provided.
/// </summary>
/// <param name="monitoringAccount">The monitoring account configuration to save.</param>
/// <param name="skipVersionCheck">Flag indicating whether or not the version flag should be honored.</param>
/// <returns>A task the caller can wait on.</returns>
public async Task SaveAsync(IMonitoringAccount monitoringAccount, bool skipVersionCheck = false)
{
var path =
$"{this.monitoringAccountUrlPrefix}/{monitoringAccount.Name}/skipVersionCheck/{skipVersionCheck}";
var uriBuilder = new UriBuilder(this.connectionInfo.GetEndpoint(monitoringAccount.Name))
{
Path = path
};
var url = uriBuilder.Uri.ToString();
await this.PostAsync(monitoringAccount, url).ConfigureAwait(false);
}
/// <summary>
/// Delete the monitoring account.
/// </summary>
/// <param name="monitoringAccount">The monitoring account configuration to delete.</param>
/// <returns>A task the caller can wait on.</returns>
public async Task DeleteAsync(string monitoringAccount)
{
var path = $"{this.tenantUrlPrefix}/{monitoringAccount}";
var uriBuilder = new UriBuilder(this.connectionInfo.GetEndpoint(monitoringAccount))
{
Path = path
};
var url = uriBuilder.Uri;
await HttpClientHelper.GetResponse(
url,
HttpMethod.Delete,
this.httpClient,
monitoringAccount,
this.operation).ConfigureAwait(false);
}
/// <summary>
/// Un-Delete the monitoring account.
/// </summary>
/// <param name="monitoringAccount">The monitoring account configuration to un-delete.</param>
/// <returns>A task the caller can wait on.</returns>
public async Task UnDeleteAsync(string monitoringAccount)
{
var path = $"{this.tenantUrlPrefix}/{monitoringAccount}/undelete";
var uriBuilder = new UriBuilder(this.connectionInfo.GetEndpoint(monitoringAccount))
{
Path = path
};
var url = uriBuilder.Uri;
await HttpClientHelper.GetResponse(
url,
HttpMethod.Post,
this.httpClient,
monitoringAccount,
this.operation).ConfigureAwait(false);
}
/// <inheritdoc />
public async Task<IReadOnlyList<ConfigurationUpdateResult>> SyncMonitoringAccountConfigurationAsync(
IMonitoringAccount monitoringAccount,
bool skipVersionCheck = false)
{
if (monitoringAccount == null)
{
throw new ArgumentNullException(nameof(monitoringAccount));
}
var mirrorOperation = $"{this.monitoringAccountUrlPrefix}replicateConfigurationToMirrorAccounts";
var path =
$"{this.monitoringAccountUrlPrefix}{monitoringAccount.Name}/replicateConfigurationToMirrorAccounts/skipVersionCheck/{skipVersionCheck}";
var uriBuilder = new UriBuilder(this.connectionInfo.GetGlobalEndpoint())
{
Path = path
};
try
{
var response = await HttpClientHelper.GetResponse(
uriBuilder.Uri,
HttpMethod.Post,
this.httpClient,
monitoringAccount.Name,
mirrorOperation).ConfigureAwait(false);
return JsonConvert.DeserializeObject<ConfigurationUpdateResult[]>(
response.Item1,
this.serializerSettings);
}
catch (MetricsClientException mce)
{
if (mce.ResponseStatusCode == HttpStatusCode.Unauthorized)
{
var exMsg = $"Unable to sync configuration for monitoring account:{monitoringAccount.Name} as "
+ $"user doesn't have permission to update configurations. Response:{mce.Message}";
throw new ConfigurationValidationException(exMsg, ValidationType.ServerSide, mce);
}
throw;
}
}
private static void ThrowSpecificExceptionIfPossible(MetricsClientException mce, string monitoringAccountName)
{
if (mce.ResponseStatusCode.HasValue)
{
switch (mce.ResponseStatusCode.Value)
{
case HttpStatusCode.NotFound:
throw new AccountNotFoundException(
$"Account [{monitoringAccountName}] not found. TraceId: [{mce.TraceId}]",
mce);
case HttpStatusCode.BadRequest:
throw new ConfigurationValidationException(
$"Account [{monitoringAccountName}] could not be saved because validation failed. Response: {mce.Message}",
ValidationType.ServerSide,
mce);
}
}
}
/// <summary>
/// Validates that the monitoring account provided can be sent to the server to be saved.
/// </summary>
/// <param name="monitoringAccount">The monitoring account configuration being saved.</param>
private static void Validate(IMonitoringAccount monitoringAccount)
{
if (monitoringAccount == null)
{
throw new ArgumentNullException(nameof(monitoringAccount));
}
if (string.IsNullOrWhiteSpace(monitoringAccount.Name))
{
throw new ArgumentException("Monitoring account name cannot be null or empty.");
}
if (monitoringAccount.Permissions == null || !monitoringAccount.Permissions.Any())
{
throw new ArgumentException("One or more permissions must be specified for this account. These can include users, security groups, or certificates.");
}
}
private async Task<bool> HasAccountCreationPermission(string stampEndpoint)
{
var monitoringAccount = "Monitoring account is not relevant here";
var relativeUrl = this.connectionInfo.GetAuthRelativeUrl($"v1/config/security/writepermissions/tenant/{monitoringAccount}");
var urlToCheckPermission = $"{stampEndpoint}/{relativeUrl}";
Tuple<string, HttpResponseMessage> response;
try
{
response = await HttpClientHelper.GetResponse(
new Uri(urlToCheckPermission),
HttpMethod.Get,
this.httpClient,
monitoringAccount,
this.operation).ConfigureAwait(false);
}
catch (MetricsClientException e)
{
if (e.ResponseStatusCode == HttpStatusCode.Forbidden)
{
return false;
}
throw;
}
var permissions = JsonConvert.DeserializeObject<string[]>(response.Item1);
return permissions != null && permissions.Contains("TenantConfiguration");
}
private async Task PostAsync(IMonitoringAccount monitoringAccount, string url)
{
Validate(monitoringAccount);
var settings = new JsonSerializerSettings { TypeNameHandling = TypeNameHandling.Auto };
var serializedMonitoringAccount = JsonConvert.SerializeObject(
monitoringAccount,
Formatting.Indented,
settings);
try
{
await HttpClientHelper.GetResponse(
new Uri(url),
HttpMethod.Post,
this.httpClient,
monitoringAccount.Name,
this.operation,
serializedContent: serializedMonitoringAccount).ConfigureAwait(false);
}
catch (MetricsClientException mce)
{
ThrowSpecificExceptionIfPossible(mce, monitoringAccount.Name);
throw;
}
}
}
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="NativeMethods.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// <summary>
// Native methods required to interact with ETW.
// </summary>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Metrics.Etw
{
using System;
using System.Diagnostics.CodeAnalysis;
using System.Runtime.InteropServices;
// ReSharper disable FieldCanBeMadeReadOnly.Global
// ReSharper disable MemberCanBePrivate.Global
// ReSharper disable UnusedMember.Global
/// <summary>
/// This type contains the P/Invoke declarations needed to interact with ETW.
/// </summary>
[SuppressMessage("Microsoft.Security", "CA2111:PointersShouldNotBeVisible", Justification = "Not accessible to any 3rd-party MS or not")]
internal static class NativeMethods
{
/// <summary>
/// Managed version for ERROR_SUCCESS. Windows error code that represents no error, i.e.: success.
/// </summary>
internal const uint ErrorSuccess = 0;
/// <summary>
/// Managed version for ERROR_INSUFFICIENT_BUFFER. The data area passed to a system call is too small.
/// </summary>
internal const uint ErrorInsufficientBuffer = 122;
/// <summary>
/// Managed version for ERROR_ALREADY_EXISTS. Cannot create some object because it already exists.
/// </summary>
internal const uint ErrorAlreadyExists = 183;
/// <summary>
/// Managed version for ERROR_MORE_DATA. More data is available.
/// </summary>
internal const uint ErrorMoreData = 234;
/// <summary>
/// Managed version for ERROR_WMI_GUID_NOT_FOUND. The GUID passed was not recognized as valid by a WMI data provider.
/// </summary>
internal const uint ErrorWmiGuidNotFound = 4200;
/// <summary>
/// Managed version for ERROR_WMI_INSTANCE_NOT_FOUND. The instance name passed was not recognized as valid by a WMI data provider.
/// </summary>
internal const uint ErrorWmiInstanceNotFound = 4201;
/// <summary>
/// Managed version for INVALID_HANDLE_VALUE. Win32 constant the represents an invalid handle.
/// </summary>
internal const ulong InvalidHandleValue = unchecked((ulong)(-1));
/// <summary>
/// Managed version for INVALID_TRACEHANDLE_64. Represents an invalid trace handle for 64bit apps.
/// </summary>
internal const ulong InvalidTracehandle64 = unchecked((ulong)(-1));
/// <summary>
/// Managed version for INVALID_TRACEHANDLE_32. Represents an invalid trace handle for 32bit apps.
/// </summary>
internal const ulong InvalidTracehandle32 = 0x00000000FFFFFFFF;
/// <summary>
/// Managed version for PROCESS_TRACE_MODE_REAL_TIME. Indicates to ETW to process the trace in
/// real-time (live) mode.
/// </summary>
internal const uint ProcessTraceModeRealTime = 0x00000100;
/// <summary>
/// Managed version for PROCESS_TRACE_MODE_RAW_TIMESTAMP. Indicates to ETW to return the event
/// timestamps with their raw value instead of transforming them to FILETIME.
/// </summary>
internal const uint ProcessTraceModeRawTimestamp = 0x00001000;
/// <summary>
/// Managed version for PROCESS_TRACE_MODE_EVENT_RECORD. Indicates to ETW to callback for each
/// event using the modern (Crimson) event format.
/// </summary>
internal const uint ProcessTraceModeEventRecord = 0x10000000;
/// <summary>
/// Managed version for EVENT_TRACE_REAL_TIME_MODE. Indicates to ETW to process the trace in real
/// time (live) mode.
/// </summary>
internal const uint EventTraceRealTimeMode = 0x00000100;
/// <summary>
/// Managed version for EVENT_TRACE_FILE_MODE_SEQUENTIAL. Indicates to ETW that the ETW events
/// should be written sequentially to a file.
/// </summary>
internal const uint EventTraceFileModeSequential = 0x00000001;
/// <summary>
/// Managed version for EVENT_TRACE_PRIVATE_LOGGER_MODE. Indicates to ETW that the session should
/// be in the private logger mode.
/// </summary>
internal const uint EventTracePrivateLoggerMode = 0x00000800;
/// <summary>
/// Managed version for EVENT_TRACE_INDEPENDENT_SESSION_MODE. Indicates that a logging session should
/// not be affected by EventWrite failures in other sessions. Without this flag, if an event cannot be
/// published to one of the sessions that a provider is enabled to, the event will not get published
/// to any of the sessions. When this flag is set, a failure to write an event to one session will not
/// cause the EventWrite function to return an error code in other sessions.
/// </summary>
/// <remarks>
/// Per e-mail thread with ETW owners there is no adverse impact (memory, latency, etc).
/// </remarks>
internal const uint EventTraceIndependentSessionMode = 0x08000000;
/// <summary>
/// Managed version for EVENT_TRACE_CONTROL_QUERY. Control code used by ETW to query the
/// properties of a tracing session.
/// </summary>
internal const uint EventTraceControlQuery = 0;
/// <summary>
/// Managed version for EVENT_TRACE_CONTROL_STOP. Control code used by ETW to stop a tracing
/// session.
/// </summary>
internal const uint EventTraceControlStop = 1;
/// <summary>
/// Managed version for EVENT_TRACE_CONTROL_UPDATE. Control code used by ETW to update the
/// properties of a tracing session.
/// </summary>
internal const uint EventTraceControlUpdate = 2;
/// <summary>
/// Managed version for EVENT_CONTROL_CODE_ENABLE_PROVIDER. Control code used by ETW to
/// enable providers.
/// </summary>
internal const uint EventControlCodeEnableProvider = 1;
/// <summary>
/// Managed version for WNODE_FLAG_TRACED_GUID. Value used to indicate that the structure
/// contains event tracing information.
/// </summary>
internal const uint WnodeFlagTracedGuid = 0x00020000;
/// <summary>
/// Managed delegate that represents the BufferCallback Win32 callback function.
/// </summary>
/// <param name="eventTraceLog">
/// It is really an EventTraceLogFile type with information about the buffer, however it is more efficient to marshal
/// it manually.
/// </param>
/// <returns>
/// True if the processing of the trace should continue, false to terminate the processing.
/// </returns>
public delegate bool EventTraceBufferCallback(
[In] IntPtr eventTraceLog);
/// <summary>
/// Managed delegate that represents the EventRecordCallback Win32 callback function.
/// </summary>
/// <param name="rawData">
/// Pointer to a EventRecord instance with the event record.
/// </param>
public unsafe delegate void EventRecordCallback(
[In] EventRecord* rawData);
/// <summary>
/// Clock resolution to use when logging the time stamp for each event.
/// </summary>
[SuppressMessage("Microsoft.Design", "CA1028:EnumStorageShouldBeInt32", Justification = "Original type used in native WinAPI is uint")]
public enum EtwSessionClockType : uint
{
/// <summary>
/// The default clock type to be used by the session, it is equivalent of selecting PerformanceCounter value.
/// </summary>
Default = 0,
/// <summary>
/// Indicates that the session uses the OS performance counter, a.k.a.: QPC. The resolution is typically 1000 times
/// less than the CPU frequency of the box. It is the recommended way to collect high-resolution timestamps in Windows.
/// </summary>
PerformanceCounter = 1,
/// <summary>
/// Indicates that the session uses the SystemTime clock (with an actual resolution of ~15 milliseconds it is one of the
/// cheapest timestamps available to ETW; the downside is the lack of resolution).
/// </summary>
SystemTime = 2,
/// <summary>
/// Indicates that the session uses the CPU timestamp (RDTSC instruction to retrieve the TSC). It is the cheapest of
/// all with the highest resolution but not guaranteed to be in sync between different processors in the box.
/// </summary>
CpuTimestamp = 3,
}
/// <summary>
/// Control codes to be used with the <c>ControlTrace</c> Windows API.
/// </summary>
[SuppressMessage("Microsoft.Design", "CA1028:EnumStorageShouldBeInt32", Justification = "Original type used in native WinAPI is uint")]
public enum TraceControl : uint
{
/// <summary>
/// Managed version for EVENT_TRACE_CONTROL_QUERY. Control code used by ETW to query the
/// properties of a tracing session.
/// </summary>
Query = 0,
/// <summary>
/// Managed version for EVENT_TRACE_CONTROL_STOP. Control code used by ETW to stop a tracing
/// session.
/// </summary>
Stop = 1,
/// <summary>
/// Managed version for EVENT_TRACE_CONTROL_UPDATE. Control code used by ETW to update the
/// properties of a tracing session.
/// </summary>
Update = 2,
/// <summary>
/// Managed version for EVENT_TRACE_CONTROL_FLUSH. Control code used by ETW to flush the
/// buffers of a tracing session.
/// </summary>
Flush = 3,
}
/// <summary>
/// Mirrors the native TRACE_QUERY_INFO_CLASS enumerations.
/// </summary>
/// <see href="http://msdn.microsoft.com/en-us/library/windows/desktop/aa364147(v=vs.85).aspx"/>
internal enum TraceQueryInfoClass
{
/// <summary>
/// Query an array of GUIDs of the providers that are registered on the computer.
/// </summary>
TraceGuidQueryList,
/// <summary>
/// Query information that each session used to enable the provider.
/// </summary>
TraceGuidQueryInfo,
/// <summary>
/// Query an array of GUIDs of the providers that registered themselves in the same
/// process as the calling process.
/// </summary>
TraceGuidQueryProcess,
/// <summary>
/// Query the setting for call stack tracing for kernel events.
/// The value is supported on Windows 7, Windows Server 2008 R2, and later.
/// </summary>
TraceStackTracingInfo,
/// <summary>
/// Query the setting for the EnableFlags for the system trace provider. For more information,
/// see the EVENT_TRACE_PROPERTIES structure.
/// The value is supported on Windows 8, Windows Server 2012, and later.
/// </summary>
TraceSystemTraceEnableFlagsInfo,
/// <summary>
/// Queries the setting for the sampling profile interval for the supplied source.
/// The value is supported on Windows 8, Windows Server 2012, and later.
/// </summary>
TraceSampledProfileIntervalInfo,
/// <summary>
/// Query which sources will be traced.
/// The value is supported on Windows 8, Windows Server 2012, and later.
/// </summary>
TraceProfileSourceConfigInfo,
/// <summary>
/// Query the setting for sampled profile list information.
/// The value is supported on Windows 8, Windows Server 2012, and later.
/// </summary>
TraceProfileSourceListInfo,
/// <summary>
/// Query the list of system events on which performance monitoring counters will be collected.
/// The value is supported on Windows 8, Windows Server 2012, and later.
/// </summary>
TracePmcEventListInfo,
/// <summary>
/// Query the list of performance monitoring counters to collect.
/// The value is supported on Windows 8, Windows Server 2012, and later.
/// </summary>
TracePmcCounterListInfo,
/// <summary>
/// Marks the last value in the enumeration. Do not use.
/// </summary>
MaxTraceSetInfoClass
}
/// <summary>
/// P/Invoke declaration for <see href="http://msdn.microsoft.com/en-us/library/windows/desktop/aa364117(v=vs.85).aspx">
/// StartTrace</see>.
/// </summary>
/// <param name="sessionHandle">
/// Handle to the event tracing session.
/// </param>
/// <param name="sessionName">
/// Name of the session.
/// </param>
/// <param name="properties">
/// Properties of the session.
/// </param>
/// <returns>
/// The Win32 error code of the call (zero indicates success, i.e. ERROR_SUCCESS Win32 error code).
/// </returns>
[DllImport("advapi32.dll", CharSet = CharSet.Unicode)]
[SuppressMessage("Microsoft.Security", "CA5122:PInvokesShouldNotBeSafeCriticalFxCopRule", Justification = "Not accessible to any 3rd-party MS or not")]
internal static extern int StartTrace(
[Out] out ulong sessionHandle,
[In] string sessionName,
[In][Out] IntPtr properties);
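// A minimal usage sketch, not part of the original API surface: allocates the unmanaged
// EVENT_TRACE_PROPERTIES buffer (struct plus two name regions, as StartTrace requires)
// and starts a real-time session. The 1024-character name regions and the
// WNODE_FLAG_TRACED_GUID / EVENT_TRACE_REAL_TIME_MODE constants are well-known Win32 values
// assumed here for illustration; they are not defined elsewhere in this file.
internal static ulong StartRealTimeSessionSketch(string sessionName)
{
    const uint WnodeFlagTracedGuid = 0x00020000;    // WNODE_FLAG_TRACED_GUID
    const uint EventTraceRealTimeMode = 0x00000100; // EVENT_TRACE_REAL_TIME_MODE
    const int MaxNameChars = 1024;

    int structSize = Marshal.SizeOf(typeof(EventTraceProperties));
    uint bufferSize = (uint)(structSize + (2 * MaxNameChars * sizeof(char)));
    IntPtr buffer = Marshal.AllocHGlobal((int)bufferSize);
    try
    {
        // Zero the whole buffer so the name regions past the struct start out empty.
        Marshal.Copy(new byte[bufferSize], 0, buffer, (int)bufferSize);

        var properties = default(EventTraceProperties);
        properties.Wnode.BufferSize = bufferSize;
        properties.Wnode.ClientContext = EtwSessionClockType.PerformanceCounter;
        properties.Wnode.Flags = WnodeFlagTracedGuid;
        properties.LogFileMode = EventTraceRealTimeMode;
        properties.LoggerNameOffset = (uint)structSize;
        properties.LogFileNameOffset = (uint)(structSize + (MaxNameChars * sizeof(char)));
        Marshal.StructureToPtr(properties, buffer, false);

        ulong sessionHandle;
        int error = StartTrace(out sessionHandle, sessionName, buffer);
        if (error != 0)
        {
            throw new System.ComponentModel.Win32Exception(error);
        }

        return sessionHandle;
    }
    finally
    {
        Marshal.FreeHGlobal(buffer);
    }
}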
/// <summary>
/// P/Invoke declaration for <see href="http://msdn.microsoft.com/en-us/library/windows/desktop/aa363696(v=vs.85).aspx">
/// ControlTrace</see>.
/// </summary>
/// <param name="sessionHandle">
/// Handle of the event tracing session.
/// </param>
/// <param name="sessionName">
/// Name of the event tracing session.
/// </param>
/// <param name="properties">
/// Properties of the session.
/// </param>
/// <param name="controlCode">
/// Control code being passed to the session.
/// </param>
/// <returns>
/// The Win32 error code of the call (zero indicates success, i.e. ERROR_SUCCESS Win32 error code).
/// </returns>
[DllImport("advapi32.dll", CharSet = CharSet.Unicode)]
[SuppressMessage("Microsoft.Security", "CA5122:PInvokesShouldNotBeSafeCriticalFxCopRule", Justification = "Not accessible to any 3rd-party MS or not")]
internal static extern int ControlTrace(
[In] ulong sessionHandle,
[In] string sessionName,
[In][Out] IntPtr properties,
[In] uint controlCode);
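// A minimal usage sketch, not part of the original API surface: stops a session by name with
// TraceControl.Stop. The properties buffer must again leave room for the logger and log file
// name strings that ControlTrace writes back; the 1024-character regions are an assumption.
internal static void StopSessionSketch(string sessionName)
{
    const int MaxNameChars = 1024;
    int structSize = Marshal.SizeOf(typeof(EventTraceProperties));
    uint bufferSize = (uint)(structSize + (2 * MaxNameChars * sizeof(char)));
    IntPtr buffer = Marshal.AllocHGlobal((int)bufferSize);
    try
    {
        Marshal.Copy(new byte[bufferSize], 0, buffer, (int)bufferSize);

        var properties = default(EventTraceProperties);
        properties.Wnode.BufferSize = bufferSize;
        properties.LoggerNameOffset = (uint)structSize;
        properties.LogFileNameOffset = (uint)(structSize + (MaxNameChars * sizeof(char)));
        Marshal.StructureToPtr(properties, buffer, false);

        // A zero session handle is acceptable when the session is identified by name.
        int error = ControlTrace(0, sessionName, buffer, (uint)TraceControl.Stop);
        if (error != 0)
        {
            throw new System.ComponentModel.Win32Exception(error);
        }
    }
    finally
    {
        Marshal.FreeHGlobal(buffer);
    }
}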
/// <summary>
/// P/Invoke declaration for <see href="http://msdn.microsoft.com/en-us/library/windows/desktop/dd392305(v=vs.85).aspx">
/// EnableTraceEx2</see>.
/// </summary>
/// <param name="traceHandle">
/// Handle to the trace session to which the provider is going to be enabled or disabled.
/// </param>
/// <param name="providerGuid">
/// Id of the provider to be enabled or disabled.
/// </param>
/// <param name="controlCode">
/// Control code to be passed to the provider.
/// </param>
/// <param name="level">
/// Importance level for which the provider is going to be set (only events with this severity and higher will be
/// collected).
/// </param>
/// <param name="matchAnyKeyword">
/// Events that will be collected need to match at least one of the bits in the keyword.
/// </param>
/// <param name="matchAllKeyword">
/// Only events matching all the bits in the keyword will be collected.
/// </param>
/// <param name="timeoutMilliseconds">
/// Timeout in milliseconds for the call to the method.
/// </param>
/// <param name="enableParameters">
/// Parameters used to enable the provider<see href="http://msdn.microsoft.com/en-us/library/windows/desktop/dd392306(v=vs.85).aspx"/>.
/// </param>
/// <returns>
/// The Win32 error code of the call (zero indicates success, i.e. ERROR_SUCCESS Win32 error code).
/// </returns>
[DllImport("advapi32.dll", CharSet = CharSet.Unicode)]
[SuppressMessage("Microsoft.Security", "CA5122:PInvokesShouldNotBeSafeCriticalFxCopRule", Justification = "Not accessible to any 3rd-party MS or not")]
internal static extern int EnableTraceEx2(
[In] ulong traceHandle,
[In] ref Guid providerGuid,
[In] uint controlCode,
[In] byte level,
[In] ulong matchAnyKeyword,
[In] ulong matchAllKeyword,
[In] uint timeoutMilliseconds,
[In][Optional] IntPtr enableParameters);
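// A minimal usage sketch, not part of the original API surface: enables a provider on an
// existing session. EVENT_CONTROL_CODE_ENABLE_PROVIDER (1) and TRACE_LEVEL_VERBOSE (5) are
// well-known Win32 values assumed here; the keyword masks are illustrative defaults.
internal static void EnableProviderSketch(ulong sessionHandle, Guid providerId)
{
    const uint EventControlCodeEnableProvider = 1;
    const byte TraceLevelVerbose = 5;

    int error = EnableTraceEx2(
        sessionHandle,
        ref providerId,
        EventControlCodeEnableProvider,
        TraceLevelVerbose,
        matchAnyKeyword: ulong.MaxValue,
        matchAllKeyword: 0,
        timeoutMilliseconds: 0,
        enableParameters: IntPtr.Zero);
    if (error != 0)
    {
        throw new System.ComponentModel.Win32Exception(error);
    }
}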
/// <summary>
/// P/Invoke declaration for ZeroMemory Win32 function.
/// </summary>
/// <param name="handle">
/// Handle to the memory to be zeroed.
/// </param>
/// <param name="length">
/// Number of bytes that should be zeroed.
/// </param>
[DllImport("kernel32.dll", SetLastError = true)]
[SuppressMessage("Microsoft.Security", "CA5122:PInvokesShouldNotBeSafeCriticalFxCopRule", Justification = "Not accessible to any 3rd-party MS or not")]
internal static extern void ZeroMemory(IntPtr handle, uint length);
/// <summary>
/// P/Invoke declaration for the <see href="http://msdn.microsoft.com/en-us/library/windows/desktop/aa364089(v=vs.85).aspx">
/// OpenTrace</see> function.
/// </summary>
/// <param name="traceLog">
/// Type with the information about the trace to be opened.
/// </param>
/// <returns>
/// If successful it returns a handle to the trace, otherwise INVALID_PROCESSTRACE_HANDLE (note that the invalid
/// handle value differs when the process is running under WOW64, i.e. Windows on Windows).
/// </returns>
[DllImport(
"advapi32.dll",
EntryPoint = "OpenTraceW",
CharSet = CharSet.Unicode,
SetLastError = true)]
[SuppressMessage("Microsoft.Security", "CA5122:PInvokesShouldNotBeSafeCriticalFxCopRule", Justification = "Not accessible to any 3rd-party MS or not")]
internal static extern ulong OpenTrace(
[In][Out] ref EventTraceLogfilew traceLog);
/// <summary>
/// P/Invoke declaration for the <see href="http://msdn.microsoft.com/en-us/library/windows/desktop/aa364093(v=vs.85).aspx">
/// ProcessTrace</see> function.
/// </summary>
/// <param name="handleArray">
/// Array with the handles of all traces to be processed.
/// </param>
/// <param name="handleCount">
/// Counter of the handles in the array.
/// </param>
/// <param name="startTime">
/// The start time for which one wants to receive events from the traces.
/// </param>
/// <param name="endTime">
/// The end time for which one wants to stop receiving events from the traces.
/// </param>
/// <returns>
/// It returns 0 (ERROR_SUCCESS) in case of success and Win32 system error code in case of error.
/// </returns>
[DllImport("advapi32.dll", CharSet = CharSet.Unicode)]
[SuppressMessage("Microsoft.Security", "CA5122:PInvokesShouldNotBeSafeCriticalFxCopRule", Justification = "Not accessible to any 3rd-party MS or not")]
internal static extern int ProcessTrace(
[In] ulong[] handleArray,
[In] uint handleCount,
[In] IntPtr startTime,
[In] IntPtr endTime);
/// <summary>
/// P/Invoke declaration for the <see href="http://msdn.microsoft.com/en-us/library/windows/desktop/aa363686(v=vs.85).aspx">CloseTrace</see> function.
/// </summary>
/// <param name="traceHandle">
/// The trace handle to be closed.
/// </param>
/// <returns>
/// It returns 0 (ERROR_SUCCESS) in case of success and Win32 system error code in case of error.
/// </returns>
[DllImport("advapi32.dll", CharSet = CharSet.Unicode)]
[SuppressMessage("Microsoft.Security", "CA5122:PInvokesShouldNotBeSafeCriticalFxCopRule", Justification = "Not accessible to any 3rd-party MS or not")]
internal static extern int CloseTrace(
[In] ulong traceHandle);
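// A minimal consumer sketch, not part of the original API surface: opens an .etl file,
// pumps its events through the supplied callback and closes the trace. The
// PROCESS_TRACE_MODE_EVENT_RECORD flag (0x10000000) and the invalid-handle values checked
// below are well-known Win32 values assumed here for illustration.
internal static void ProcessEtlFileSketch(string etlFileName, EventRecordCallback eventCallback)
{
    const uint ProcessTraceModeEventRecord = 0x10000000;

    var logFile = default(EventTraceLogfilew);
    logFile.LogFileName = etlFileName;
    logFile.LogFileMode = ProcessTraceModeEventRecord;
    logFile.EventCallback = eventCallback;

    ulong traceHandle = OpenTrace(ref logFile);
    if (traceHandle == ulong.MaxValue || traceHandle == uint.MaxValue)
    {
        // INVALID_PROCESSTRACE_HANDLE; the failure reason is in the last Win32 error.
        throw new System.ComponentModel.Win32Exception(Marshal.GetLastWin32Error());
    }

    try
    {
        // ProcessTrace blocks until the file is fully consumed (or the trace is closed).
        int error = ProcessTrace(new[] { traceHandle }, 1, IntPtr.Zero, IntPtr.Zero);
        if (error != 0)
        {
            throw new System.ComponentModel.Win32Exception(error);
        }
    }
    finally
    {
        CloseTrace(traceHandle);
    }
}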
/// <summary>
/// P/Invoke declaration for EnumerateTraceGuidsEx.
/// </summary>
/// <see href="http://msdn.microsoft.com/en-us/library/windows/desktop/aa363714(v=vs.85).aspx"/>
/// <param name="traceQueryInfoClass">
/// Determines the type of information to include with the list of registered providers.
/// For possible values, see the TRACE_QUERY_INFO_CLASS enumeration.
/// </param>
/// <param name="inBuffer">
/// GUID of the provider whose information you want to retrieve. Specify the GUID only
/// if TraceQueryInfoClass is TraceGuidQueryInfo.
/// </param>
/// <param name="inBufferSize">
/// Size, in bytes, of the data InBuffer.
/// </param>
/// <param name="outBuffer">
/// Application-allocated buffer that contains the enumerated information. The format of
/// the information depends on the value of TraceQueryInfoClass. For details, see Remarks.
/// </param>
/// <param name="outBufferSize">
/// Size, in bytes, of the OutBuffer buffer. If the function succeeds, the ReturnLength
/// parameter receives the size of the buffer used. If the buffer is too small, the function
/// returns ERROR_INSUFFICIENT_BUFFER and the ReturnLength parameter receives the required
/// buffer size. If the buffer size is zero on input, no data is returned in the buffer and
/// the ReturnLength parameter receives the required buffer size.
/// </param>
/// <param name="returnLength">
/// Actual size of the data in OutBuffer, in bytes.
/// </param>
/// <returns>
/// If the function succeeds, the return value is ERROR_SUCCESS. If the function fails,
/// the return value is one of the system error codes.
/// </returns>
[DllImport("advapi32.dll")]
[SuppressMessage("Microsoft.Security", "CA5122:PInvokesShouldNotBeSafeCriticalFxCopRule", Justification = "Not accessible to any 3rd-party MS or not")]
internal static extern unsafe int EnumerateTraceGuidsEx(
TraceQueryInfoClass traceQueryInfoClass,
void* inBuffer,
int inBufferSize,
void* outBuffer,
int outBufferSize,
ref int returnLength);
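// A minimal usage sketch, not part of the original API surface: enumerates the GUIDs of all
// registered providers using the size-probe / fill pattern described above.
// ERROR_INSUFFICIENT_BUFFER (122) is the standard Win32 code assumed here.
internal static unsafe Guid[] QueryRegisteredProviderGuidsSketch()
{
    const int ErrorInsufficientBuffer = 122;

    int requiredBytes = 0;
    int error = EnumerateTraceGuidsEx(
        TraceQueryInfoClass.TraceGuidQueryList, null, 0, null, 0, ref requiredBytes);
    if (error != 0 && error != ErrorInsufficientBuffer)
    {
        throw new System.ComponentModel.Win32Exception(error);
    }

    if (requiredBytes == 0)
    {
        return new Guid[0];
    }

    var guids = new Guid[requiredBytes / sizeof(Guid)];
    fixed (Guid* outBuffer = guids)
    {
        error = EnumerateTraceGuidsEx(
            TraceQueryInfoClass.TraceGuidQueryList, null, 0, outBuffer, requiredBytes, ref requiredBytes);
    }

    if (error != 0)
    {
        throw new System.ComponentModel.Win32Exception(error);
    }

    return guids;
}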
/// <summary>
/// P/Invoke declaration for QueryAllTraces.
/// </summary>
/// <see href="http://msdn.microsoft.com/en-us/library/windows/desktop/aa364102(v=vs.85).aspx"/>
/// <param name="propertyArray">
/// An array of pointers to EVENT_TRACE_PROPERTIES structures that receive session properties and
/// statistics for the event tracing sessions.
/// </param>
/// <param name="propertyArrayCount">
/// Number of structures in the PropertyArray array.
/// </param>
/// <param name="sessionCount">
/// Actual number of event tracing sessions started on the computer.
/// </param>
/// <returns>
/// If the function succeeds, the return value is ERROR_SUCCESS. If the function fails,
/// the return value is one of the system error codes.
/// </returns>
[DllImport("advapi32.dll")]
[SuppressMessage("Microsoft.Security", "CA5122:PInvokesShouldNotBeSafeCriticalFxCopRule", Justification = "Not accessible to any 3rd-party MS or not")]
internal static extern unsafe int QueryAllTracesW(
[In][Out] void* propertyArray,
[In] uint propertyArrayCount,
[In][Out] ref uint sessionCount);
/// <summary>
/// Managed version of WNODE_HEADER.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
public struct WnodeHeader
{
/// <summary>
/// Gets the total size of memory allocated, in bytes, for the event tracing
/// session properties. The size of memory must include the room for the
/// EVENT_TRACE_PROPERTIES structure plus the session name string and log file
/// name string that follow the structure in memory.
/// </summary>
public uint BufferSize;
/// <summary>
/// Gets a value reserved for internal use.
/// </summary>
public uint ProviderId;
/// <summary>
/// Gets, on output, the handle to the event tracing session.
/// </summary>
public ulong HistoricalContext;
/// <summary>
/// Gets the time at which the information in this structure was updated,
/// in 100-nanosecond intervals since midnight, January 1, 1601.
/// </summary>
public ulong TimeStamp;
/// <summary>
/// Gets the GUID of the session.
/// </summary>
public Guid Guid;
/// <summary>
/// Gets the clock resolution to use when logging the time stamp for each
/// event. The default is query performance counter (QPC).
/// </summary>
public EtwSessionClockType ClientContext;
/// <summary>
/// Gets the flags of the session.
/// </summary>
/// <remarks>
/// Must contain WNODE_FLAG_TRACED_GUID to indicate that the structure
/// contains event tracing information.
/// </remarks>
public uint Flags;
}
/// <summary>
/// Managed version of EVENT_TRACE_PROPERTIES. Note that it cannot be used directly with the P/Invoke functions because
/// extra information is added to the end of the struct (see LogFileNameOffset and LoggerNameOffset).
/// </summary>
[StructLayout(LayoutKind.Sequential)]
public struct EventTraceProperties
{
/// <summary>
/// Gets the WNODE_HEADER structure associated to the trace.
/// </summary>
public WnodeHeader Wnode;
/// <summary>
/// Gets the amount of memory allocated for each event tracing session
/// buffer, in kilobytes.
/// </summary>
public uint BufferSize;
/// <summary>
/// Gets the minimum number of buffers allocated for the event tracing
/// session's buffer pool.
/// </summary>
public uint MinimumBuffers;
/// <summary>
/// Gets the maximum number of buffers allocated for the event tracing
/// session's buffer pool.
/// </summary>
public uint MaximumBuffers;
/// <summary>
/// Gets the maximum size of the file used to log events, in megabytes.
/// </summary>
public uint MaximumFileSize;
/// <summary>
/// Gets the logging file mode for the trace.
/// </summary>
public uint LogFileMode;
/// <summary>
/// Gets the time to wait before flushing buffers, in seconds. If zero,
/// ETW flushes buffers as soon as they become full. If non-zero, ETW
/// flushes all buffers that contain events based on the timer value.
/// </summary>
public uint FlushTimer;
/// <summary>
/// Gets, via bit flags, which events are enabled for a kernel logger session.
/// </summary>
public uint EnableFlags;
/// <summary>
/// Gets a value that is not used by ETW.
/// </summary>
public int AgeLimit;
/// <summary>
/// Gets, on output, the number of buffers allocated for the event tracing
/// session's buffer pool.
/// </summary>
public uint NumberOfBuffers;
/// <summary>
/// Gets, on output, the number of buffers that are allocated but unused
/// in the event tracing session's buffer pool.
/// </summary>
public uint FreeBuffers;
/// <summary>
/// Gets, on output, the number of events that were not recorded.
/// </summary>
public uint EventsLost;
/// <summary>
/// Gets, on output, the number of buffers written.
/// </summary>
public uint BuffersWritten;
/// <summary>
/// Gets, on output, the number of buffers that could not be written
/// to the log file.
/// </summary>
public uint LogBuffersLost;
/// <summary>
/// Gets, on output, the number of buffers that could not be delivered
/// in real-time to the consumer.
/// </summary>
public uint RealTimeBuffersLost;
/// <summary>
/// Gets, on output, the thread identifier for the event tracing session.
/// </summary>
[SuppressMessage("Microsoft.Security", "CA2111:PointersShouldNotBeVisible", Justification = "Not accessible to any 3rd-party MS or not")]
public IntPtr LoggerThreadId;
/// <summary>
/// Gets the offset from the start of the structure's allocated memory to
/// beginning of the null-terminated string that contains the log file name.
/// </summary>
public uint LogFileNameOffset;
/// <summary>
/// Gets the offset from the start of the structure's allocated memory to
/// beginning of the null-terminated string that contains the session name.
/// </summary>
public uint LoggerNameOffset;
}
/// <summary>
/// Managed version of ETW_BUFFER_CONTEXT, contains some context information about the ETW buffer in which
/// the event was collected.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
[SuppressMessage("Microsoft.Security", "CA2151:FieldsWithCriticalTypesShouldBeCriticalFxCopRule", Justification = "Not accessible to any 3rd-party MS or not")]
public struct EtwBufferContext
{
/// <summary>
/// Gets the number of the CPU on which the provider process was running.
/// The number is zero on a single processor computer.
/// </summary>
public byte ProcessorNumber;
/// <summary>
/// Gets alignment between events (always eight).
/// </summary>
public byte Alignment;
/// <summary>
/// Gets the identifier of the session that logged the event.
/// </summary>
public ushort LoggerId;
}
/// <summary>
/// Simplified managed version of EVENT_HEADER.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
[SuppressMessage("Microsoft.Security", "CA2151:FieldsWithCriticalTypesShouldBeCriticalFxCopRule", Justification = "Not accessible to any 3rd-party MS or not")]
public struct EventHeader
{
/// <summary>
/// Gets the size of the event record, in bytes.
/// </summary>
public ushort Size;
/// <summary>
/// Gets the header type (reserved).
/// </summary>
public ushort HeaderType;
/// <summary>
/// Gets the flags that provide information about the event such as the type
/// of session it was logged to and whether the event contains extended data.
/// </summary>
public ushort Flags; // offset: 0x4
/// <summary>
/// Gets the type of source to use for parsing the event data.
/// </summary>
public ushort EventProperty;
/// <summary>
/// Gets the thread that generated the event.
/// </summary>
public int ThreadId; // offset: 0x8
/// <summary>
/// Gets the process that generated the event.
/// </summary>
public int ProcessId; // offset: 0xc
/// <summary>
/// Gets the time the event occurred. The resolution depends on the value
/// of the <see href="http://msdn.microsoft.com/en-us/library/aa364160(v=vs.85).aspx">ClientContext</see> of
/// the WNODE_HEADER member of the EVENT_TRACE_PROPERTIES structure when the controller created the session.
/// </summary>
public long TimeStamp; // offset: 0x10
/// <summary>
/// Gets the GUID that uniquely identifies the provider that logged the event.
/// </summary>
public Guid ProviderId; // offset: 0x18
/// <summary>
/// Gets the Id of the event.
/// </summary>
public ushort Id; // offset: 0x28
/// <summary>
/// Gets the version of the event.
/// </summary>
public byte Version; // offset: 0x2a
/// <summary>
/// Gets the channel of the event.
/// </summary>
public byte Channel;
/// <summary>
/// Gets the level of the event.
/// </summary>
public byte Level; // offset: 0x2c
/// <summary>
/// Gets the opcode of the event.
/// </summary>
[SuppressMessage("StyleCop.CSharp.DocumentationRules", "SA1650:ElementDocumentationMustBeSpelledCorrectly", Justification = "Reviewed. Suppression is OK here.")]
public byte Opcode;
/// <summary>
/// Gets the task of the event.
/// </summary>
public ushort Task;
/// <summary>
/// Gets the keyword of the event.
/// </summary>
public ulong Keyword;
/// <summary>
/// Gets the elapsed execution time for kernel-mode instructions, in CPU time units.
/// </summary>
public int KernelTime; // offset: 0x38
/// <summary>
/// Gets the elapsed execution time for user-mode instructions, in CPU time units.
/// </summary>
public int UserTime; // offset: 0x3C
/// <summary>
/// Gets an identifier that relates two events. For details, see EventWriteTransfer.
/// </summary>
public Guid ActivityId;
}
/// <summary>
/// Managed version of EVENT_RECORD; represents a single modern (a.k.a. Crimson) ETW event.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
[SuppressMessage("Microsoft.Security", "CA2151:FieldsWithCriticalTypesShouldBeCriticalFxCopRule", Justification = "Not accessible to any 3rd-party MS or not")]
public struct EventRecord
{
/// <summary>
/// Gets the header information about the event such as the time stamp for when
/// it was written. For details, see EVENT_HEADER.
/// </summary>
public EventHeader EventHeader; // size: 80
/// <summary>
/// Gets the context information such as the session that logged the event.
/// For details, see ETW_BUFFER_CONTEXT.
/// </summary>
public EtwBufferContext BufferContext; // size: 4
/// <summary>
/// Gets the number of extended data structures in ExtendedData.
/// </summary>
public ushort ExtendedDataCount;
/// <summary>
/// Gets the Size, in bytes, of the data in UserData.
/// </summary>
public ushort UserDataLength; // offset: 86
/// <summary>
/// Gets the extended data items that ETW collects if the controller sets the EnableProperty
/// parameter of EnableTraceEx. For details, see EVENT_HEADER_EXTENDED_DATA_ITEM.
/// </summary>
[SuppressMessage("Microsoft.Security", "CA2111:PointersShouldNotBeVisible", Justification = "Not accessible to any 3rd-party MS or not")]
public IntPtr ExtendedData;
/// <summary>
/// Gets the event specific data. To parse this data, see Retrieving Event Data Using TDH.
/// If the Flags member of EVENT_HEADER is EVENT_HEADER_FLAG_STRING_ONLY, the data is a
/// null-terminated Unicode string that you do not need TDH to parse.
/// </summary>
[SuppressMessage("Microsoft.Security", "CA2111:PointersShouldNotBeVisible", Justification = "Not accessible to any 3rd-party MS or not")]
public IntPtr UserData;
/// <summary>
/// Gets the context specified in the Context member of the EVENT_TRACE_LOGFILE structure
/// that is passed to OpenTrace.
/// </summary>
[SuppressMessage("Microsoft.Security", "CA2111:PointersShouldNotBeVisible", Justification = "Not accessible to any 3rd-party MS or not")]
public IntPtr UserContext;
}
/// <summary>
/// Managed declaration for the native TRACE_ENABLE_INFO structure.
/// </summary>
/// <see href="http://msdn.microsoft.com/en-us/library/windows/desktop/aa364141(v=vs.85).aspx"/>
[StructLayout(LayoutKind.Sequential)]
public struct TraceEnableInfo
{
/// <summary>
/// Indicates if the provider is enabled to the session. The value is TRUE if the provider
/// is enabled to the session, otherwise, the value is FALSE. This value should always be TRUE.
/// </summary>
public uint IsEnabled;
/// <summary>
/// Level of detail that the session asked the provider to include in the events. For details,
/// see the Level parameter of the EnableTraceEx function.
/// </summary>
public byte Level;
/// <summary>
/// Reserved, do not use.
/// </summary>
public byte Reserved1;
/// <summary>
/// Identifies the session that enabled the provider.
/// </summary>
public ushort LoggerId;
/// <summary>
/// Additional information that the session wants ETW to include in the log file. For details,
/// see the EnableProperty parameter of the EnableTraceEx function.
/// </summary>
public uint EnableProperty;
/// <summary>
/// Reserved, do not use.
/// </summary>
public uint Reserved2;
/// <summary>
/// Keywords specify which events the session wants the provider to write. For details, see the
/// MatchAnyKeyword parameter of the EnableTraceEx function.
/// </summary>
public long MatchAnyKeyword;
/// <summary>
/// Keywords specify which events the session wants the provider to write. For details, see the
/// MatchAllKeyword parameter of the EnableTraceEx function.
/// </summary>
public long MatchAllKeyword;
}
/// <summary>
/// Managed version of TIME_ZONE_INFORMATION. Used as one field of TRACE_EVENT_LOGFILE, below.
/// Total struct size is 0xac.
/// </summary>
[StructLayout(LayoutKind.Sequential, Size = 0xac, CharSet = CharSet.Unicode)]
internal struct TimeZoneInformation
{
/// <summary>
/// Gets the current bias for local time translation on this computer, in
/// minutes. The bias is the difference, in minutes, between Coordinated
/// Universal Time (UTC) and local time.
/// </summary>
internal uint bias;
/// <summary>
/// Gets the description for standard time. For example, "EST" could
/// indicate Eastern Standard Time.
/// </summary>
[MarshalAs(UnmanagedType.ByValTStr, SizeConst = 32)]
internal string standardName;
/// <summary>
/// Gets a SYSTEMTIME structure that contains a date and local time
/// when the transition from daylight saving time to standard time occurs
/// on this operating system.
/// </summary>
[MarshalAs(UnmanagedType.ByValArray, ArraySubType = UnmanagedType.U2, SizeConst = 8)]
internal ushort[] standardDate;
/// <summary>
/// Gets the bias value to be used during local time translations that
/// occur during standard time. This member is ignored if a value for
/// the StandardDate member is not supplied.
/// </summary>
internal uint standardBias;
/// <summary>
/// Gets a description for daylight saving time. For example, "PDT" could
/// indicate Pacific Daylight Time.
/// </summary>
[MarshalAs(UnmanagedType.ByValTStr, SizeConst = 32)]
internal string daylightName;
/// <summary>
/// Gets a SYSTEMTIME structure that contains a date and local time when
/// the transition from standard time to daylight saving time occurs on
/// this operating system.
/// </summary>
[MarshalAs(UnmanagedType.ByValArray, ArraySubType = UnmanagedType.U2, SizeConst = 8)]
internal ushort[] daylightDate;
/// <summary>
/// Gets the bias value to be used during local time translations that
/// occur during daylight saving time. This member is ignored if a value
/// for the DaylightDate member is not supplied.
/// </summary>
internal uint daylightBias;
}
/// <summary>
/// Managed version of TRACE_LOGFILE_HEADER is used to define EVENT_TRACE_LOGFILEW.
/// Total struct size is 0x110.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
internal struct TraceLogfileHeader
{
/// <summary>
/// Gets the size of the event tracing session's buffers, in kilobytes.
/// </summary>
internal uint BufferSize;
/// <summary>
/// Gets the version number of the operating system. This is a roll-up of
/// the members of VersionDetail. Starting with the low-order bytes, the
/// first two bytes contain MajorVersion, the next two bytes contain MinorVersion,
/// the next two bytes contain SubVersion, and the last two bytes contain SubMinorVersion.
/// </summary>
internal uint Version;
/// <summary>
/// Gets the build number of the operating system.
/// </summary>
internal uint ProviderVersion;
/// <summary>
/// Gets the number of processors on the system.
/// </summary>
internal uint NumberOfProcessors;
/// <summary>
/// Gets the time at which the event tracing session stopped, in 100-nanosecond
/// intervals since midnight, January 1, 1601. This value may be 0 if you are
/// consuming events in real time or from a log file to which the provider is
/// still logging events.
/// </summary>
internal long EndTime; // 0x10
/// <summary>
/// Gets the resolution of the hardware timer, in units of 100 nanoseconds.
/// </summary>
internal uint TimerResolution;
/// <summary>
/// Gets the maximum size of the log file, in megabytes.
/// </summary>
internal uint MaximumFileSize;
/// <summary>
/// Gets the current logging mode for the event tracing session.
/// </summary>
internal uint LogFileMode;
/// <summary>
/// Gets the total number of buffers written by the event tracing session.
/// </summary>
internal uint BuffersWritten;
/// <summary>
/// Gets a reserved value.
/// </summary>
internal uint StartBuffers;
/// <summary>
/// Gets the size of a pointer data type, in bytes.
/// </summary>
internal uint PointerSize;
/// <summary>
/// Gets the number of events lost during the event tracing session. Events
/// may be lost due to insufficient memory or a very high rate of incoming
/// events.
/// </summary>
internal uint EventsLost; // 0x30
/// <summary>
/// Gets the CPU speed, in MHz.
/// </summary>
internal uint CpuSpeedInMHz;
/// <summary>
/// Gets a value that is not used (present only to keep the struct layout).
/// </summary>
internal IntPtr LoggerName; // string, but not CoTaskMemAlloc'd
/// <summary>
/// Gets a value that is not used (present only to keep the struct layout).
/// </summary>
internal IntPtr LogFileName; // string, but not CoTaskMemAlloc'd
/// <summary>
/// Gets a TIME_ZONE_INFORMATION structure that contains the time zone
/// for the BootTime, EndTime and StartTime members.
/// </summary>
internal TimeZoneInformation TimeZone; // 0x40 0xac size
/// <summary>
/// Gets the time at which the system was started, in 100-nanosecond intervals
/// since midnight, January 1, 1601. BootTime is supported only for traces
/// written to the Global Logger session.
/// </summary>
internal long BootTime;
/// <summary>
/// Gets the frequency of the high-resolution performance counter, if one exists.
/// </summary>
internal long PerfFreq;
/// <summary>
/// Gets the time at which the event tracing session started, in 100-nanosecond
/// intervals since midnight, January 1, 1601.
/// </summary>
internal long StartTime;
/// <summary>
/// Gets the clock type. For details, see the ClientContext member of WNODE_HEADER.
/// </summary>
internal uint ReservedFlags;
/// <summary>
/// Gets the total number of buffers lost during the event tracing session.
/// </summary>
internal uint BuffersLost;
}
/// <summary>
/// Managed version of EVENT_TRACE_HEADER, represents the common header of all ETW events.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
internal struct EventTraceHeader
{
/// <summary>
/// Gets the total number of bytes of the event. Size includes the size
/// of the header structure, plus the size of any event-specific data
/// appended to the header.
/// </summary>
internal ushort Size;
/// <summary>
/// Gets a reserved value.
/// </summary>
internal ushort FieldTypeFlags; // holds our MarkerFlags too
/// <summary>
/// Gets the type of event. A provider can define its own event types
/// or use the predefined event types.
/// </summary>
internal byte Type;
/// <summary>
/// Gets the provider-defined value that defines the severity level used
/// to generate the event. The value ranges from 0 to 255.
/// </summary>
internal byte Level;
/// <summary>
/// Gets the version of the event trace class that you are using to log
/// the event. Specify zero if there is only one version of your event
/// trace class. The version tells the consumer which MOF class to use
/// to decipher the event data.
/// </summary>
internal ushort Version;
/// <summary>
/// Gets the id of the thread that generated the event.
/// </summary>
internal int ThreadId;
/// <summary>
/// Gets the id of the process that generated the event.
/// </summary>
internal int ProcessId;
/// <summary>
/// Gets the time the event occurred. The resolution depends on the value
/// of the <see href="http://msdn.microsoft.com/en-us/library/aa364160(v=vs.85).aspx">ClientContext</see> of
/// the WNODE_HEADER member of the EVENT_TRACE_PROPERTIES structure when the controller created the session.
/// </summary>
internal long TimeStamp; // Offset 0x10
/// <summary>
/// Gets the Event trace class GUID. You can use the class GUID to identify
/// a category of events and the Class.EventType member to identify an event within
/// the category of events.
/// </summary>
internal Guid Guid;
/// <summary>
/// Gets the elapsed execution time for kernel-mode instructions, in CPU time
/// units. If you are using a private session, use the value in the ProcessorTime
/// member instead.
/// </summary>
internal int KernelTime; // Offset 0x28
/// <summary>
/// Gets the elapsed execution time for user-mode instructions, in CPU time units.
/// If you are using a private session, use the value in the ProcessorTime member
/// instead.
/// </summary>
internal int UserTime;
}
/// <summary>
/// Managed version of EVENT_TRACE, it represents a single ETW event.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
internal struct EventTrace
{
/// <summary>
/// Gets an EVENT_TRACE_HEADER structure that contains standard event
/// tracing information.
/// </summary>
internal EventTraceHeader Header;
/// <summary>
/// Gets the instance identifier. Contains valid data when the provider
/// calls the TraceEventInstance function to generate the event.
/// Otherwise, the value is zero.
/// </summary>
internal uint InstanceId;
/// <summary>
/// Gets the instance identifier for a parent event. Contains valid data
/// when the provider calls the TraceEventInstance function to generate
/// the event. Otherwise, the value is zero.
/// </summary>
internal uint ParentInstanceId;
/// <summary>
/// Gets the GUID of the parent event. Contains valid data when the provider
/// calls the TraceEventInstance function to generate the event.
/// Otherwise, the value is zero.
/// </summary>
internal Guid ParentGuid;
/// <summary>
/// Gets the pointer to the beginning of the event-specific data for this event.
/// </summary>
internal IntPtr MofData; // PVOID
/// <summary>
/// Gets the number of bytes pointed by <see cref="MofData"/>.
/// </summary>
internal int MofLength;
/// <summary>
/// Gets information about the event such as the session identifier and
/// processor number of the CPU on which the provider process ran.
/// </summary>
internal EtwBufferContext BufferContext;
}
/// <summary>
/// Managed version of EVENT_TRACE_LOGFILEW. This is the main struct passed to OpenTrace().
/// </summary>
[StructLayout(LayoutKind.Sequential, CharSet = CharSet.Unicode)]
internal struct EventTraceLogfilew
{
/// <summary>
/// Gets the name of the log file used by the event tracing session.
/// </summary>
[MarshalAs(UnmanagedType.LPWStr)]
internal string LogFileName;
/// <summary>
/// Gets the name of the event tracing session.
/// </summary>
[MarshalAs(UnmanagedType.LPWStr)]
internal string LoggerName;
/// <summary>
/// Gets the current time, in 100-nanosecond intervals since midnight, January 1, 1601.
/// </summary>
internal long CurrentTime;
/// <summary>
/// Gets the number of buffers processed.
/// </summary>
internal uint BuffersRead;
/// <summary>
/// Gets the log file mode (real time or trace) to be used.
/// </summary>
internal uint LogFileMode;
/// <summary>
/// Gets, on output, an EVENT_TRACE structure that contains the last event processed.
/// </summary>
/// <remarks>
/// EVENT_TRACE for the current event. Nulled-out when we are opening files.
/// </remarks>
internal EventTrace CurrentEvent;
/// <summary>
/// Gets, on output, a TRACE_LOGFILE_HEADER structure that contains general
/// information about the session and the computer on which the session ran.
/// </summary>
internal TraceLogfileHeader LogfileHeader;
/// <summary>
/// Gets the pointer to the BufferCallback function that receives buffer-related
/// statistics for each buffer ETW flushes. ETW calls this callback after it delivers
/// all the events in the buffer. This callback is optional.
/// </summary>
internal EventTraceBufferCallback BufferCallback;
/// <summary>
/// Gets, on output, the size of each buffer, in bytes.
/// </summary>
internal int BufferSize;
/// <summary>
/// Gets, on output, contains the number of bytes in the buffer that contain valid information.
/// </summary>
internal int Filled;
/// <summary>
/// Gets the number of lost events. Currently not used by ETW.
/// </summary>
internal int EventsLost;
/// <summary>
/// Gets the pointer to the EventCallback function that ETW calls for each event in the buffer.
/// </summary>
internal EventRecordCallback EventCallback;
/// <summary>
/// Gets a value indicating whether this is a kernel trace or not.
/// </summary>
internal int IsKernelTrace;
/// <summary>
/// Gets the context data that a consumer can specify when calling OpenTrace.
/// </summary>
internal IntPtr Context;
}
/// <summary>
/// Managed declaration for the native TRACE_GUID_INFO structure.
/// </summary>
/// <see href="http://msdn.microsoft.com/en-us/library/windows/desktop/aa364142(v=vs.85).aspx"/>
[StructLayout(LayoutKind.Sequential)]
internal struct TraceGuidInfo
{
/// <summary>
/// The number of TRACE_PROVIDER_INSTANCE_INFO blocks contained in the list. You can have
/// multiple instances of the same provider if the provider lives in a DLL that is loaded
/// by multiple processes.
/// </summary>
internal uint InstanceCount;
/// <summary>
/// Reserved, do not use.
/// </summary>
internal uint Reserved;
}
/// <summary>
/// Managed declaration for the native TRACE_PROVIDER_INSTANCE_INFO structure.
/// </summary>
/// <see href="http://msdn.microsoft.com/en-us/library/windows/desktop/aa364146(v=vs.85).aspx"/>
[StructLayout(LayoutKind.Sequential)]
internal struct TraceProviderInstanceInfo
{
/// <summary>
/// Offset, in bytes, from the beginning of this structure to the next TRACE_PROVIDER_INSTANCE_INFO
/// structure. The value is zero if there is not another instance info block.
/// </summary>
internal uint NextOffset;
/// <summary>
/// Number of TRACE_ENABLE_INFO structures in this block. Each structure represents a session that
/// enabled the provider.
/// </summary>
internal uint EnableCount;
/// <summary>
/// Process identifier of the process that registered the provider.
/// </summary>
internal uint Pid;
/// <summary>
/// Can be one of the following flags TRACE_PROVIDER_FLAG_LEGACY, i.e.: The provider used
/// RegisterTraceGuids instead of EventRegister to register itself, or
/// TRACE_PROVIDER_FLAG_PRE_ENABLE, i.e.: The provider is not registered; however, one or
/// more sessions have enabled the provider.
/// </summary>
internal uint Flags;
}
}
}<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="SelectionClauseV3.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Query
{
/// <summary>
/// Used to determine how many results should be returned from the query, how they should be ordered, and what criteria are used to determine
/// which series are included.
/// </summary>
public sealed class SelectionClauseV3
{
/// <summary>
/// Initializes a new instance of the <see cref="SelectionClauseV3"/> class.
/// </summary>
/// <param name="propertyDefinition">The property used .</param>
/// <param name="numberOfResultsToReturn">The number of results to return.</param>
/// <param name="orderBy">The ordering of the selection.</param>
public SelectionClauseV3(PropertyDefinition propertyDefinition, int numberOfResultsToReturn, OrderBy orderBy)
{
this.PropertyDefinition = propertyDefinition;
this.NumberOfResultsToReturn = numberOfResultsToReturn;
this.OrderBy = orderBy;
}
/// <summary>
/// Gets the property definition that determines which sampling type data is used to select the top series.
/// </summary>
public PropertyDefinition PropertyDefinition { get; }
/// <summary>
/// Gets the number of time series to return.
/// </summary>
public int NumberOfResultsToReturn { get; }
/// <summary>
/// Gets the ordering of the selection
/// </summary>
public OrderBy OrderBy { get; }
}
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="ILocalMetricReader.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Metrics
{
using System;
using System.Threading;
using System.Threading.Tasks;
/// <summary>
/// The interface for consumption of locally aggregated metrics or local raw metrics.
/// </summary>
public interface ILocalMetricReader
{
/// <summary>
/// Reads the local raw metrics.
/// </summary>
/// <param name="metricProducedAction">The action to execute when metric available.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <param name="etlFileName">The name of the etw file from when read data. If null, realtime session will be used.</param>
/// <returns>An awaitable <see cref="Task"/>.</returns>
Task ReadLocalRawMetricsAsync(
Action<ILocalRawMetric> metricProducedAction,
CancellationToken cancellationToken,
string etlFileName = null);
/// <summary>
/// Reads the locally aggregated metrics.
/// </summary>
/// <param name="metricProducedAction">The action to execute when metric available.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <param name="etlFileName">The name of the etw file from when read data. If null, realtime session will be used.</param>
/// <returns>An awaitable <see cref="Task"/>.</returns>
Task ReadLocalAggregatedMetricsAsync(
Action<ILocalAggregatedMetric> metricProducedAction,
CancellationToken cancellationToken,
string etlFileName = null);
}
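/// <summary>
/// A minimal usage sketch for <see cref="ILocalMetricReader"/>; it is not part of the original
/// contract. The reader instance is assumed to be supplied by the hosting application, and the
/// console sink is purely illustrative.
/// </summary>
internal static class LocalMetricReaderUsageSketch
{
    /// <summary>
    /// Pumps raw local metrics to the console until the token is cancelled.
    /// </summary>
    /// <param name="reader">The reader implementation supplied by the host.</param>
    /// <param name="cancellationToken">The cancellation token.</param>
    /// <returns>An awaitable <see cref="Task"/>.</returns>
    internal static Task DumpRawMetricsAsync(ILocalMetricReader reader, CancellationToken cancellationToken)
    {
        // A null file name selects the real-time session, per the contract documented above.
        return reader.ReadLocalRawMetricsAsync(
            metric => Console.WriteLine(metric),
            cancellationToken,
            etlFileName: null);
    }
}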
}<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="MetricEnrichmentRule.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.MetricEnrichmentRuleManagement
{
using System;
using System.Collections.Generic;
/// <summary>
/// Represents a metric enrichment rule.
/// </summary>
/// <remarks>
/// TODO: Add a metric enrichment rule builder once we are ready to announce this to customer.
/// </remarks>
public sealed class MetricEnrichmentRule
{
/// <summary>
/// Initializes a new instance of the <see cref="MetricEnrichmentRule"/> class.
/// </summary>
/// <param name="stampId">The stamp identifier this rule belongs too.</param>
/// <param name="monitoringAccountFilter">
/// The monitoring account filter to be used for matching applicable rules for a given monitoring account.
/// Use * to represent Wild card filter.
/// </param>
/// <param name="metricNamespaceFilter">
/// The metric namespace filter to be used for matching applicable rules for a given metric namespace.
/// Use * to represent Wild card filter.
/// </param>
/// <param name="metricNameFilter">
/// The metric name filter to be used for matching applicable rules for a given metric name.
/// Use * to represent Wild card filter.
/// </param>
/// <param name="transformations">Represents the transformations associated with this rule..</param>
public MetricEnrichmentRule(string stampId, string monitoringAccountFilter, string metricNamespaceFilter, string metricNameFilter, List<MetricEnrichmentRuleTransformationDefinition> transformations)
{
if (string.IsNullOrEmpty(stampId))
{
throw new ArgumentNullException(nameof(stampId));
}
if (string.IsNullOrEmpty(monitoringAccountFilter))
{
throw new ArgumentNullException(nameof(monitoringAccountFilter));
}
if (string.IsNullOrEmpty(metricNamespaceFilter))
{
throw new ArgumentNullException(nameof(metricNamespaceFilter));
}
if (string.IsNullOrEmpty(metricNameFilter))
{
throw new ArgumentNullException(nameof(metricNameFilter));
}
this.StampId = stampId;
this.MonitoringAccountFilter = monitoringAccountFilter;
this.MetricNamespaceFilter = metricNamespaceFilter;
this.MetricNameFilter = metricNameFilter;
this.Transformations = transformations;
}
/// <summary>
/// Gets the stamp identifier this rule belongs to.
/// </summary>
public string StampId { get; private set; }
/// <summary>
/// Gets the filter to be used for matching applicable rules for a given monitoring account.
/// </summary>
/// <remarks>
/// Use * to represent Wild card filter.
/// </remarks>
public string MonitoringAccountFilter { get; private set; }
/// <summary>
/// Gets the filter to be used for matching applicable rules for a given metric namespace.
/// </summary>
/// <remarks>
/// Use * to represent Wild card filter.
/// </remarks>
public string MetricNamespaceFilter { get; private set; }
/// <summary>
/// Gets the filter to be used for matching applicable rules for a given metric name.
/// </summary>
/// <remarks>
/// Use * to represent Wild card filter.
/// </remarks>
public string MetricNameFilter { get; private set; }
/// <summary>
/// Gets the transformations associated with this rule.
/// </summary>
public List<MetricEnrichmentRuleTransformationDefinition> Transformations { get; private set; }
/// <summary>
/// Validates the data is valid rule.
/// </summary>
/// <returns>
/// The validation failure message; an empty string means validation passed.
/// </returns>
internal string Validate()
{
if (string.IsNullOrEmpty(this.StampId))
{
return "Stamp id cannot be null";
}
if (string.IsNullOrEmpty(this.MonitoringAccountFilter))
{
return "MonitoringAccountFilter cannot be null";
}
if (string.IsNullOrEmpty(this.MetricNamespaceFilter))
{
return "MetricNamespaceFilter cannot be null";
}
if (string.IsNullOrEmpty(this.MetricNameFilter))
{
return "MetricNameFilter cannot be null";
}
if (this.Transformations == null || this.Transformations.Count == 0)
{
return "Transformations cannot be null or empty";
}
foreach (var transformation in this.Transformations)
{
var validationFailureMessage = transformation.Validate();
if (!string.IsNullOrEmpty(validationFailureMessage))
{
return validationFailureMessage;
}
}
return string.Empty;
}
}
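/// <summary>
/// A minimal usage sketch, not part of the original API: shows how the empty-string contract of
/// <see cref="MetricEnrichmentRule.Validate"/> can be turned into a boolean check. The rule
/// instance is assumed to be constructed elsewhere.
/// </summary>
internal static class MetricEnrichmentRuleValidationSketch
{
    /// <summary>
    /// Determines whether the rule passes validation.
    /// </summary>
    /// <param name="rule">The rule to validate.</param>
    /// <returns>True if the rule is valid; otherwise false.</returns>
    internal static bool IsValid(MetricEnrichmentRule rule)
    {
        // Validate returns an empty string when all checks pass and a failure message otherwise.
        return string.IsNullOrEmpty(rule.Validate());
    }
}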
}
<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="VersionComparer.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Utility
{
using System.Linq;
using Microsoft.Cloud.Metrics.Client.Configuration;
using Microsoft.Cloud.Metrics.Client.Logging;
using Newtonsoft.Json;
/// <summary>
/// The comparer class for configuration versions.
/// </summary>
internal static class VersionComparer
{
private static readonly object LogId = Logger.CreateCustomLogId("VersionComparer");
/// <summary>
/// Compares the metrics version with server.
/// </summary>
/// <param name="metricConfigFromFile">The metric configuration from file.</param>
/// <param name="metricConfigurationOnServer">The metric configuration on server.</param>
/// <returns>
/// 1 if configuration from file has a higher version number, -1 if lower, and 0 if identical.
/// </returns>
internal static int CompareMetricsVersionWithServer(IMetricConfiguration metricConfigFromFile, IMetricConfiguration metricConfigurationOnServer)
{
const string logTag = "CompareMetricsVersionWithServer";
if (metricConfigurationOnServer == null)
{
return 1;
}
if (metricConfigFromFile.Version == metricConfigurationOnServer.Version)
{
Logger.Log(
LoggerLevel.Warning,
LogId,
logTag,
"The version in the file is the same as the one on the server. Skip.");
return 0;
}
if (metricConfigFromFile.Version < metricConfigurationOnServer.Version)
{
Logger.Log(
LoggerLevel.Warning,
LogId,
logTag,
$"The version {metricConfigFromFile.Version} in the file is less than the one {metricConfigurationOnServer.Version} on the server! Please download the latest version first. Skip.");
return -1;
}
return 1;
}
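/// <summary>
/// A minimal usage sketch, not part of the original API: interprets the 1/-1/0 contract of
/// <see cref="CompareMetricsVersionWithServer"/> so that only a strictly newer local
/// configuration is treated as uploadable.
/// </summary>
/// <param name="metricConfigFromFile">The metric configuration from file.</param>
/// <param name="metricConfigurationOnServer">The metric configuration on server.</param>
/// <returns>True when the local configuration should be pushed to the server.</returns>
internal static bool ShouldUploadSketch(IMetricConfiguration metricConfigFromFile, IMetricConfiguration metricConfigurationOnServer)
{
    return CompareMetricsVersionWithServer(metricConfigFromFile, metricConfigurationOnServer) > 0;
}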
}
}<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="IMonitoringAccountAcls.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client
{
using System.Collections.Generic;
/// <summary>
/// Represents a list of ACLs associated with a monitoring account.
/// </summary>
/// <remarks>
/// This does not include AP PKI as the cert is automatically generated and the ACL itself is not needed client side.
/// </remarks>
internal interface IMonitoringAccountAcls
{
/// <summary>
/// Gets the thumbprints.
/// </summary>
List<string> Thumbprints { get; }
/// <summary>
/// Gets the dSMS acls.
/// </summary>
List<string> DsmsAcls { get; }
/// <summary>
/// Gets the KeyVault acls.
/// </summary>
List<string> KeyVaultAcls { get; }
}
}
<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="ClientAssemblyMigrationSerializationBinder.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Utility
{
using System;
using System.Linq;
using Newtonsoft.Json.Serialization;
/// <summary>
/// Binds the client assembly to an assembly known to the server.
/// </summary>
internal class ClientAssemblyMigrationSerializationBinder : DefaultSerializationBinder
{
private readonly ClientAssemblyMigration[] migrations;
/// <summary>
/// Initializes a new instance of the <see cref="ClientAssemblyMigrationSerializationBinder"/> class.
/// </summary>
/// <param name="migrations">The migrations.</param>
public ClientAssemblyMigrationSerializationBinder(ClientAssemblyMigration[] migrations)
{
this.migrations = migrations;
}
/// <summary>
/// Determines what type the serialized data should be bound to.
/// </summary>
/// <param name="assemblyName">Name of the assembly.</param>
/// <param name="typeName">Name of the type.</param>
/// <returns>The type to bind the data to.</returns>
public override Type BindToType(string assemblyName, string typeName)
{
var migration = this.migrations.SingleOrDefault(p => p.FromAssembly == assemblyName && p.FromType == typeName);
if (migration != null)
{
return migration.ToType;
}
return base.BindToType(assemblyName, typeName);
}
}
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="UserPermissionV2.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Configuration
{
using System;
using Newtonsoft.Json;
/// <summary>
/// User with access to MDM.
/// </summary>
public sealed class UserPermissionV2 : IPermissionV2
{
/// <summary>
/// Initializes a new instance of the <see cref="UserPermissionV2"/> class.
/// Create a new user for MDM access.
/// </summary>
/// <param name="identity">The alias of the user.</param>
/// <param name="roleConfiguration">The role assigned to this user.</param>
[JsonConstructor]
public UserPermissionV2(string identity, RoleConfiguration roleConfiguration)
{
this.Identity = identity;
this.Description = null;
this.RoleConfiguration = roleConfiguration;
}
/// <summary>
/// The identity to grant permission.
/// </summary>
public string Identity { get; }
/// <inheritdoc />
[JsonProperty(DefaultValueHandling = DefaultValueHandling.Ignore)]
public string Description { get; }
/// <summary>
/// The level of access to be granted to this identity.
/// </summary>
public RoleConfiguration RoleConfiguration { get; set; }
}
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="SegmentPurpose.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Metrics.Services.Common.BlobSegment
{
/// <summary>
/// The purpose of a segment - used for metric reporting to better understand the memory usage patterns.
/// Also for debug mode it is possible to store the segment purpose within the segment and check
/// if wrong segment is released during release operations.
/// The values MUST start with 0 and increment by 1 (this assumption is made by BlobSegmentPool.PoolStatistics).
/// The value of BlobSegment.ReleasedSegment cannot be used as a SegmentPurpose value.
/// </summary>
public enum SegmentPurpose
{
/// <summary>
/// The segment purpose is undefined.
/// </summary>
Undefined = 0,
/// <summary>
/// The segment is created to hold the incoming histogram.
/// </summary>
IncomingHistogram = 1,
/// <summary>
/// The segment is created to hold the merged histogram arena.
/// </summary>
MergedHistogramArena = 2,
/// <summary>
/// The segment holds the histogram in CST storage.
/// </summary>
HistogramForStorage = 3,
/// <summary>
/// The segment is created during the blob growth during write operation.
/// </summary>
GrowOnWritePastTheEndOfBlob = 4,
/// <summary>
/// The segment that is part of a clone of existing blob (request for data from storage that returns the copy of the data)
/// </summary>
BlobClone = 5,
/// <summary>
/// The segment that was read from disk file that contains histograms
/// </summary>
DiskHistogram = 6,
/// <summary>
/// The segment that was read from Azure Table Storage as a result of the query
/// </summary>
AzureTableStorageQueryHistogram = 7,
/// <summary>
/// The segment that was created for uploading data to Azure Table Storage.
/// </summary>
AzureTableStorageUploadHistogram = 8,
/// <summary>
/// The segment that was created during histograms aggregation on FE or FTA.
/// </summary>
AggregatedHistogram = 9,
/// <summary>
/// The segment that was created during the query-time histogram merge.
/// </summary>
QueryTimeMergeHistogram = 10,
/// <summary>
/// The segment that was created during list of buckets conversion.
/// </summary>
ListToBlobConversion = 11,
/// <summary>
/// The segment used to read encoded rollup data and then decode it when serving a query from rollups.
/// </summary>
RollupDataDecoding = 12,
}
}
<file_sep>//-----------------------------------------------------------------------
// <copyright file="AccountNotFoundException.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-----------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Configuration
{
using System;
/// <summary>
/// Exception thrown when account object cannot be found.
/// </summary>
public sealed class AccountNotFoundException : Exception
{
/// <summary>
/// Initializes a new instance of the <see cref="AccountNotFoundException"/> class.
/// </summary>
/// <param name="message">Message describing exception situation.</param>
/// <param name="innerException">Inner exception which caused exception situation.</param>
public AccountNotFoundException(string message, Exception innerException)
: base(message, innerException)
{
}
}
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="TraceBufferContext.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// <summary>
// Type that wraps access to the buffer context fields of an ETW event record.
// </summary>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Metrics.Etw
{
using System;
using System.Diagnostics.CodeAnalysis;
/// <summary>
/// Type that wraps access to the buffer context fields of an ETW event record.
/// </summary>
internal unsafe struct TraceBufferContext
{
/// <summary>
/// Pointer to the native structure being wrapped.
/// </summary>
[SuppressMessage("Microsoft.Security", "CA2151:FieldsWithCriticalTypesShouldBeCriticalFxCopRule", Justification = "Not accessible to any 3rd-party MS or not")]
private readonly NativeMethods.EtwBufferContext* bufferContext;
/// <summary>
/// Initializes a new instance of the <see cref="TraceBufferContext"/> struct.
/// </summary>
/// <param name="bufferContext">
/// Pointer to the native structure being wrapped by the instance.
/// </param>
public TraceBufferContext(NativeMethods.EtwBufferContext* bufferContext)
{
if (bufferContext == null)
{
throw new ArgumentNullException("bufferContext");
}
this.bufferContext = bufferContext;
}
/// <summary>
/// Gets the number of the CPU on which the provider process was running.
/// The number is zero on a single processor computer.
/// </summary>
public byte ProcessorNumber
{
get
{
return this.bufferContext->ProcessorNumber;
}
}
/// <summary>
/// Gets alignment between events (always eight).
/// </summary>
public byte Alignment
{
get
{
return this.bufferContext->Alignment;
}
}
/// <summary>
/// Gets Identifier of the model that logged the event.
/// </summary>
public ushort LoggerId
{
get
{
return this.bufferContext->LoggerId;
}
}
}
}
<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="ComputedSamplingTypeConfiguration.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Online.Metrics.Serialization.Configuration
{
using Newtonsoft.Json;
/// <summary>
/// Represents the configuration of computed sampling type.
/// </summary>
public class ComputedSamplingTypeConfiguration
{
/// <summary>
/// This is the expression used by the built-in "Average" sampling type.
/// </summary>
public const string AverageExpression = @"raw.Sum / (raw.Count || 1)";
/// <summary>
/// This is the expression used by the built-in "NullableAverage" sampling type.
/// </summary>
public const string NullableAverageExpression = @"raw.Count ? (raw.Sum / raw.Count) : null";
/// <summary>
/// This is the expression used by the built-in "Rate" sampling type.
/// </summary>
public const string RateExpression = @"raw.Sum / 60";
/// <summary>
/// This is the expression used by the built-in "Standard deviation" sampling type.
/// </summary>
public const string StandardDeviationExpression = @"raw.Count < 2 ? null : Math.sqrt(raw.SumOfSquareDiffFromMean / raw.Count)";
/// <summary>
/// Gets or sets the friendly name for the computed metric, e.g.: Successful calls, CPU utilization, etc.
/// </summary>
public string Name { get; set; }
/// <summary>
/// Gets or sets the JavaScript expression used to produce this metric from raw values.
/// </summary>
public string Expression { get; set; }
/// <summary>
/// Gets or sets the engine used to evaluate the expression. If it is null, empty, or blank,
/// the engine defaults to the one configured for the application.
/// </summary>
[JsonProperty(DefaultValueHandling = DefaultValueHandling.Ignore)]
public string ExpressionEngine { get; set; }
/// <summary>
/// Gets or sets the number suffix.
/// </summary>
/// <remarks>
/// This is the unit to show in the UI for the data points, such as seconds, ms, %, or any free text that makes sense.
/// </remarks>
[JsonProperty(DefaultValueHandling = DefaultValueHandling.Ignore)]
public string Unit { get; set; }
/// <summary>
/// Gets or sets a value indicating whether the sampling type is added by the system.
/// </summary>
/// <remarks>
/// These sampling type configurations cannot be deleted or modified.
/// </remarks>
[JsonProperty(DefaultValueHandling = DefaultValueHandling.Ignore)]
public bool IsBuiltIn { get; set; }
}
}
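// Illustrative usage sketch (not part of the original source): populates a computed sampling
// type configuration with the built-in Average expression documented above. The name, unit,
// and example namespace are hypothetical.
namespace Microsoft.Online.Metrics.Serialization.Configuration.Examples
{
    internal static class ComputedSamplingTypeConfigurationExample
    {
        internal static ComputedSamplingTypeConfiguration CreateAverage()
        {
            return new ComputedSamplingTypeConfiguration
            {
                Name = "Average",
                Expression = ComputedSamplingTypeConfiguration.AverageExpression,
                Unit = "ms",       // free-text unit shown in the UI for data points
                IsBuiltIn = false  // user-defined configurations are not marked as built-in
            };
        }
    }
}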
<file_sep>// -----------------------------------------------------------------------
// <copyright file="EventSourceLogEngine.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// -----------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Logging
{
using System;
using System.Diagnostics.Tracing;
using System.Globalization;
/// <summary>
/// Log engine that will emit event source based messages.
/// </summary>
/// <remarks>
/// Class is intentionally left public to allow invocation of the GenerateManifest method when manifested ETW event consumption is needed.
/// </remarks>
[EventSource(Name = "Microsoft-MDMetricsClient", Guid = "{FEB9BEAF-6D93-442E-BB78-7F581B618201}")]
public sealed class EventSourceLogEngine : EventSource, ILogEngine
{
/// <summary>
/// The logger instance
/// </summary>
private static readonly Lazy<EventSourceLogEngine> Instance = new Lazy<EventSourceLogEngine>(() => new EventSourceLogEngine());
/// <summary>
/// Prevents a default instance of the <see cref="EventSourceLogEngine"/> class from being created.
/// </summary>
private EventSourceLogEngine()
{
// Do nothing.
}
/// <summary>
/// Gets the logger instance.
/// </summary>
public static EventSourceLogEngine Logger
{
get { return Instance.Value; }
}
/// <summary>
/// Logs the given data according to the engine implementation.
/// </summary>
/// <param name="level">Level of the log statement.</param>
/// <param name="logId">Log identification for classifying log statements.</param>
/// <param name="tag">Extra string that allows another level of classification under the log id.</param>
/// <param name="format">Message to be logged, it can be a format message.</param>
/// <param name="objectParams">Optional, any parameter to be used to build the formatted message string.</param>
public void Log(LoggerLevel level, object logId, string tag, string format, params object[] objectParams)
{
if (this.IsLogged(level, logId, tag))
{
var intermediateFormat = string.Format(
CultureInfo.InvariantCulture,
"Level=[{0}] LogId=[{1}] Tag=[{2}] {3}",
level,
logId,
tag,
format);
var finalMessage = string.Format(CultureInfo.InvariantCulture, intermediateFormat, objectParams);
switch (level)
{
case LoggerLevel.Debug:
this.EventSourceLogDebug(finalMessage);
break;
case LoggerLevel.Info:
this.EventSourceLogInfo(finalMessage);
break;
case LoggerLevel.Warning:
this.EventSourceLogWarning(finalMessage);
break;
case LoggerLevel.Error:
this.EventSourceLogError(finalMessage);
break;
case LoggerLevel.CustomerFacingInfo:
this.EventSourceLogInfo(finalMessage);
break;
}
}
}
/// <summary>
/// Logs the message via event source at debug level.
/// </summary>
/// <param name="message">The message.</param>
[Event(1, Level = EventLevel.Verbose)]
public void EventSourceLogDebug(string message)
{
this.WriteEvent(1, message);
}
/// <summary>
/// Logs the message via event source at info level.
/// </summary>
/// <param name="message">The message.</param>
[Event(2, Level = EventLevel.Informational)]
public void EventSourceLogInfo(string message)
{
this.WriteEvent(2, message);
}
/// <summary>
/// Logs the message via event source at warning level.
/// </summary>
/// <param name="message">The message.</param>
[Event(3, Level = EventLevel.Warning)]
public void EventSourceLogWarning(string message)
{
this.WriteEvent(3, message);
}
/// <summary>
/// Logs the message via event source at error level.
/// </summary>
/// <param name="message">The message.</param>
[Event(4, Level = EventLevel.Error)]
public void EventSourceLogError(string message)
{
this.WriteEvent(4, message);
}
/// <summary>
/// Checks if a log statement with the given parameters will be actually logged or
/// not. Useful to avoid expensive operations for log statements that are going to
/// be dropped by the log engine.
/// </summary>
/// <param name="level">Level of the log statement.</param>
/// <param name="logId">Log identification for classifying log statements.</param>
/// <param name="tag">Extra string that allows another level of classification under the log id.</param>
/// <returns>
/// True if the statement is going to be logged, false otherwise.
/// </returns>
public bool IsLogged(LoggerLevel level, object logId, string tag)
{
return this.IsEnabled(this.GetEtwLevelFromLogLevel(level), EventKeywords.None);
}
/// <summary>
/// Gets the ETW level from log level.
/// </summary>
/// <param name="level">The log level.</param>
/// <returns>The ETW level</returns>
private EventLevel GetEtwLevelFromLogLevel(LoggerLevel level)
{
switch (level)
{
case LoggerLevel.Debug:
return EventLevel.Verbose;
case LoggerLevel.Info:
return EventLevel.Informational;
case LoggerLevel.Warning:
return EventLevel.Warning;
case LoggerLevel.Error:
return EventLevel.Error;
}
return EventLevel.Informational;
}
}
}
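// Illustrative usage sketch (not part of the original source): guards an expensive log
// statement with IsLogged before formatting, as the IsLogged documentation above suggests.
// The log id, tag, message, and example namespace are hypothetical.
namespace Microsoft.Cloud.Metrics.Client.Logging.Examples
{
    internal static class EventSourceLogEngineExample
    {
        internal static void LogStartup(int workerCount)
        {
            var logger = EventSourceLogEngine.Logger;

            // Skip message formatting entirely when the Informational level is not enabled.
            if (logger.IsLogged(LoggerLevel.Info, "Startup", "Init"))
            {
                logger.Log(LoggerLevel.Info, "Startup", "Init", "Started {0} workers", workerCount);
            }
        }
    }
}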
<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="DoubleValueSerializer.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Online.Metrics.Serialization
{
using System;
using System.IO;
using System.Runtime.CompilerServices;
using Metrics.Serialization.BitHelper;
/// <summary>
/// This class provides a set of methods to serialize double values using the Gorilla paper algorithm.
/// </summary>
public sealed class DoubleValueSerializer
{
private const int NumBitsToEncodeNumLeadingZeros = 5;
private const int NumBitsToEncodeNumMeaningfulBits = 6;
private const int MaxLeadingZerosLength = (1 << NumBitsToEncodeNumLeadingZeros) - 1;
private static readonly double[] EmptyDoubleArray = new double[0];
private static readonly double?[] EmptyNullableDoubleArray = new double?[0];
/// <summary>
/// Serializes the specified <paramref name="values"/>.
/// </summary>
/// <param name="writer">The writer.</param>
/// <param name="values">The values to serialize.</param>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static void Serialize(BinaryWriter writer, double[] values)
{
unsafe
{
if (values.Length > 0)
{
fixed (double* p = &values[0])
{
Serialize(writer, p, values.Length);
}
}
else
{
Serialize(writer, null, 0);
}
}
}
/// <summary>
/// Serializes the specified <paramref name="values"/>.
/// </summary>
/// <param name="writer">The writer.</param>
/// <param name="values">The pointer to the array of values to serialize.</param>
/// <param name="count">The number of elements in values array.</param>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static unsafe void Serialize(BinaryWriter writer, double* values, int count)
{
// Reserve one byte for future versioning.
writer.Write((byte)1);
SerializationUtils.WriteUInt32AsBase128(writer, (uint)count);
if (count > 0)
{
BitBinaryWriter bitWriter = new BitBinaryWriter(writer);
var previousState = new DoubleValueState(0, -1, -1);
for (int i = 0; i < count; ++i)
{
DoubleValueState newState;
WriteDouble(bitWriter, values[i], previousState, out newState);
previousState = newState;
}
bitWriter.Flush();
}
}
/// <summary>
/// Deserializes to an array of <see lang="double"/> from the specified reader.
/// </summary>
/// <param name="reader">The reader.</param>
/// <returns>An array of <see lang="double"/>.</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static double[] Deserialize(BinaryReader reader)
{
// version not in use yet.
reader.ReadByte();
var numOfItems = (int)SerializationUtils.ReadUInt32FromBase128(reader);
if (numOfItems == 0)
{
return EmptyDoubleArray;
}
var result = new double[numOfItems];
unsafe
{
fixed (double* p = &result[0])
{
DeserializeValues(reader, p, numOfItems);
}
}
return result;
}
/// <summary>
/// Deserializes to an array of <see lang="double"/> from the specified reader.
/// </summary>
/// <param name="reader">The reader.</param>
/// <param name="values">The pointer to the array to store the result values.</param>
/// <param name="expectedCount">The number of elements in values array, this value should match the serialized array size.</param>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static unsafe void Deserialize(BinaryReader reader, double* values, int expectedCount)
{
// version not in use yet.
reader.ReadByte();
var count = (int)SerializationUtils.ReadUInt32FromBase128(reader);
if (count != expectedCount)
{
throw new InvalidDataException($"Wrong count in serialized data: expected {expectedCount}, but was {count}");
}
DeserializeValues(reader, values, count);
}
/// <summary>
/// Deserializes to an array of nullable <see lang="double"/> from the specified reader.
/// </summary>
/// <param name="reader">The reader.</param>
/// <returns>An array of nullable <see lang="double"/>.</returns>
public static double?[] DeserializeToNullableDoubles(BinaryReader reader)
{
// version not in use yet.
reader.ReadByte();
var numOfItems = (int)SerializationUtils.ReadUInt32FromBase128(reader);
if (numOfItems == 0)
{
return EmptyNullableDoubleArray;
}
var result = new double?[numOfItems];
var bitBinaryReader = new BitBinaryReader(reader);
var previousState = new DoubleValueState(0, -1, -1);
for (int i = 0; i < numOfItems; ++i)
{
var newState = ReadDouble(bitBinaryReader, previousState);
result[i] = double.IsNaN(newState.Value) ? (double?)null : newState.Value;
previousState = newState;
}
return result;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static unsafe void DeserializeValues(BinaryReader reader, double* values, int count)
{
var bitBinaryReader = new BitBinaryReader(reader);
var previousState = new DoubleValueState(0, -1, -1);
for (int i = 0; i < count; ++i)
{
var newState = ReadDouble(bitBinaryReader, previousState);
values[i] = newState.Value;
previousState = newState;
}
}
/// <summary>
/// Encodes the given double value to the given writer.
/// </summary>
/// <param name="writer">The writer into which value needs to be encoded.</param>
/// <param name="value">The value to be encoded.</param>
/// <param name="previousValue">The previous value state.</param>
/// <param name="newValue">The new value state.</param>
/// <returns>True if encoding succeeded; otherwise false.</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static bool WriteDouble(BitBinaryWriter writer, double value, DoubleValueState previousValue, out DoubleValueState newValue)
{
newValue = previousValue;
newValue.Value = value;
var xor = BitConverter.DoubleToInt64Bits(value) ^ BitConverter.DoubleToInt64Bits(previousValue.Value);
if (xor == 0)
{
writer.WriteBit(false);
}
else
{
writer.WriteBit(true);
var leadingZeros = (sbyte)BitAggregateMagic.CountLeadingZeros(xor);
var trailingZeros = (sbyte)BitAggregateMagic.CountTrailingZeros(xor);
if (leadingZeros > MaxLeadingZerosLength)
{
leadingZeros = MaxLeadingZerosLength;
}
int blockSize = 64 - leadingZeros - trailingZeros;
int expectedSize = NumBitsToEncodeNumLeadingZeros + NumBitsToEncodeNumMeaningfulBits + blockSize;
int previousBlockInformationSize = 64 - previousValue.TrailingZeros - previousValue.LeadingZeros;
// The block position is set by the first non-zero XOR value. previousValue.LeadingZeros and TrailingZeros are initialized to -1 to start with.
if (previousValue.LeadingZeros > 0 && leadingZeros >= previousValue.LeadingZeros && trailingZeros >= previousValue.TrailingZeros && previousBlockInformationSize < expectedSize)
{
writer.WriteBit(false);
// there are at least as many leading zeros and as many trailing zeros as with the previous value, reuse the block position.
var numMeaningfulBits = BitAggregateMagic.NumBitsInLongInteger - previousValue.LeadingZeros - previousValue.TrailingZeros;
writer.WriteBits(xor, numMeaningfulBits, previousValue.TrailingZeros);
}
else
{
// start a new block position
writer.WriteBit(true);
writer.WriteBits(leadingZeros, NumBitsToEncodeNumLeadingZeros, 0);
newValue.LeadingZeros = leadingZeros;
var numMeaningfulBits = BitAggregateMagic.NumBitsInLongInteger - leadingZeros - trailingZeros;
writer.WriteBits(numMeaningfulBits, NumBitsToEncodeNumMeaningfulBits, 0);
newValue.TrailingZeros = trailingZeros;
writer.WriteBits(xor, numMeaningfulBits, trailingZeros);
}
}
return true;
}
/// <summary>
/// Decodes the double value from the given reader.
/// </summary>
/// <param name="reader">The reader from which double value needs to be decoded.</param>
/// <param name="state">The state of previous decoding.</param>
/// <returns>Decoded double value with state.</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static DoubleValueState ReadDouble(BitBinaryReader reader, DoubleValueState state)
{
var firstBit = reader.ReadBit();
if (!firstBit)
{
return state;
}
var secondBit = reader.ReadBit();
long meaningFulBits;
if (!secondBit)
{
var numBitsToRead = BitAggregateMagic.NumBitsInLongInteger - state.LeadingZeros - state.TrailingZeros;
meaningFulBits = reader.ReadBits(numBitsToRead);
}
else
{
// a new block position was started since the number starts with "11".
state.LeadingZeros = (sbyte)reader.ReadBits(NumBitsToEncodeNumLeadingZeros);
var numBitsToRead = (sbyte)reader.ReadBits(NumBitsToEncodeNumMeaningfulBits);
if (numBitsToRead == 0)
{
// The block size is 64 bits which becomes 0 in writing into 6 bits - overflow.
// If the block size were indeed 0 bits, the xor value would be 0, and the actual value would be identical to the prior value,
// so we would have bailed out early on since firstBit would be 0.
numBitsToRead = (sbyte)BitAggregateMagic.NumBitsInLongInteger;
}
state.TrailingZeros = (sbyte)(BitAggregateMagic.NumBitsInLongInteger - state.LeadingZeros - numBitsToRead);
meaningFulBits = reader.ReadBits(numBitsToRead);
}
var xor = meaningFulBits << state.TrailingZeros;
state.Value = BitConverter.Int64BitsToDouble(xor ^ BitConverter.DoubleToInt64Bits(state.Value));
return state;
}
private struct DoubleValueState
{
public double Value;
public sbyte LeadingZeros;
public sbyte TrailingZeros;
public DoubleValueState(double value, sbyte leadingZeros, sbyte trailingZeros)
{
this.Value = value;
this.LeadingZeros = leadingZeros;
this.TrailingZeros = trailingZeros;
}
}
}
}
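// Illustrative round-trip sketch (not part of the original source): serializes a small array
// with the encoder above and reads it back. Only DoubleValueSerializer.Serialize/Deserialize
// come from this file; the stream handling and example namespace are hypothetical.
namespace Microsoft.Online.Metrics.Serialization.Examples
{
    using System.IO;
    using System.Text;

    internal static class DoubleValueSerializerExample
    {
        internal static double[] RoundTrip(double[] values)
        {
            using (var stream = new MemoryStream())
            {
                // leaveOpen keeps the stream usable for the read pass below.
                using (var writer = new BinaryWriter(stream, Encoding.UTF8, leaveOpen: true))
                {
                    DoubleValueSerializer.Serialize(writer, values);
                }

                stream.Position = 0;
                using (var reader = new BinaryReader(stream))
                {
                    return DoubleValueSerializer.Deserialize(reader);
                }
            }
        }
    }
}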
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="Crc.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Online.Metrics.Serialization
{
using System.IO;
using System.Runtime.CompilerServices;
/// <summary>
/// Class for computing CRC
/// </summary>
public static class Crc
{
private static uint[] crcTable = new uint[256]
{
0x00000000, 0x77073096, 0xee0e612c, 0x990951ba, 0x076dc419,
0x706af48f, 0xe963a535, 0x9e6495a3, 0x0edb8832, 0x79dcb8a4,
0xe0d5e91e, 0x97d2d988, 0x09b64c2b, 0x7eb17cbd, 0xe7b82d07,
0x90bf1d91, 0x1db71064, 0x6ab020f2, 0xf3b97148, 0x84be41de,
0x1adad47d, 0x6ddde4eb, 0xf4d4b551, 0x83d385c7, 0x136c9856,
0x646ba8c0, 0xfd62f97a, 0x8a65c9ec, 0x14015c4f, 0x63066cd9,
0xfa0f3d63, 0x8d080df5, 0x3b6e20c8, 0x4c69105e, 0xd56041e4,
0xa2677172, 0x3c03e4d1, 0x4b04d447, 0xd20d85fd, 0xa50ab56b,
0x35b5a8fa, 0x42b2986c, 0xdbbbc9d6, 0xacbcf940, 0x32d86ce3,
0x45df5c75, 0xdcd60dcf, 0xabd13d59, 0x26d930ac, 0x51de003a,
0xc8d75180, 0xbfd06116, 0x21b4f4b5, 0x56b3c423, 0xcfba9599,
0xb8bda50f, 0x2802b89e, 0x5f058808, 0xc60cd9b2, 0xb10be924,
0x2f6f7c87, 0x58684c11, 0xc1611dab, 0xb6662d3d, 0x76dc4190,
0x01db7106, 0x98d220bc, 0xefd5102a, 0x71b18589, 0x06b6b51f,
0x9fbfe4a5, 0xe8b8d433, 0x7807c9a2, 0x0f00f934, 0x9609a88e,
0xe10e9818, 0x7f6a0dbb, 0x086d3d2d, 0x91646c97, 0xe6635c01,
0x6b6b51f4, 0x1c6c6162, 0x856530d8, 0xf262004e, 0x6c0695ed,
0x1b01a57b, 0x8208f4c1, 0xf50fc457, 0x65b0d9c6, 0x12b7e950,
0x8bbeb8ea, 0xfcb9887c, 0x62dd1ddf, 0x15da2d49, 0x8cd37cf3,
0xfbd44c65, 0x4db26158, 0x3ab551ce, 0xa3bc0074, 0xd4bb30e2,
0x4adfa541, 0x3dd895d7, 0xa4d1c46d, 0xd3d6f4fb, 0x4369e96a,
0x346ed9fc, 0xad678846, 0xda60b8d0, 0x44042d73, 0x33031de5,
0xaa0a4c5f, 0xdd0d7cc9, 0x5005713c, 0x270241aa, 0xbe0b1010,
0xc90c2086, 0x5768b525, 0x206f85b3, 0xb966d409, 0xce61e49f,
0x5edef90e, 0x29d9c998, 0xb0d09822, 0xc7d7a8b4, 0x59b33d17,
0x2eb40d81, 0xb7bd5c3b, 0xc0ba6cad, 0xedb88320, 0x9abfb3b6,
0x03b6e20c, 0x74b1d29a, 0xead54739, 0x9dd277af, 0x04db2615,
0x73dc1683, 0xe3630b12, 0x94643b84, 0x0d6d6a3e, 0x7a6a5aa8,
0xe40ecf0b, 0x9309ff9d, 0x0a00ae27, 0x7d079eb1, 0xf00f9344,
0x8708a3d2, 0x1e01f268, 0x6906c2fe, 0xf762575d, 0x806567cb,
0x196c3671, 0x6e6b06e7, 0xfed41b76, 0x89d32be0, 0x10da7a5a,
0x67dd4acc, 0xf9b9df6f, 0x8ebeeff9, 0x17b7be43, 0x60b08ed5,
0xd6d6a3e8, 0xa1d1937e, 0x38d8c2c4, 0x4fdff252, 0xd1bb67f1,
0xa6bc5767, 0x3fb506dd, 0x48b2364b, 0xd80d2bda, 0xaf0a1b4c,
0x36034af6, 0x41047a60, 0xdf60efc3, 0xa867df55, 0x316e8eef,
0x4669be79, 0xcb61b38c, 0xbc66831a, 0x256fd2a0, 0x5268e236,
0xcc0c7795, 0xbb0b4703, 0x220216b9, 0x5505262f, 0xc5ba3bbe,
0xb2bd0b28, 0x2bb45a92, 0x5cb36a04, 0xc2d7ffa7, 0xb5d0cf31,
0x2cd99e8b, 0x5bdeae1d, 0x9b64c2b0, 0xec63f226, 0x756aa39c,
0x026d930a, 0x9c0906a9, 0xeb0e363f, 0x72076785, 0x05005713,
0x95bf4a82, 0xe2b87a14, 0x7bb12bae, 0x0cb61b38, 0x92d28e9b,
0xe5d5be0d, 0x7cdcefb7, 0x0bdbdf21, 0x86d3d2d4, 0xf1d4e242,
0x68ddb3f8, 0x1fda836e, 0x81be16cd, 0xf6b9265b, 0x6fb077e1,
0x18b74777, 0x88085ae6, 0xff0f6a70, 0x66063bca, 0x11010b5c,
0x8f659eff, 0xf862ae69, 0x616bffd3, 0x166ccf45, 0xa00ae278,
0xd70dd2ee, 0x4e048354, 0x3903b3c2, 0xa7672661, 0xd06016f7,
0x4969474d, 0x3e6e77db, 0xaed16a4a, 0xd9d65adc, 0x40df0b66,
0x37d83bf0, 0xa9bcae53, 0xdebb9ec5, 0x47b2cf7f, 0x30b5ffe9,
0xbdbdf21c, 0xcabac28a, 0x53b39330, 0x24b4a3a6, 0xbad03605,
0xcdd70693, 0x54de5729, 0x23d967bf, 0xb3667a2e, 0xc4614ab8,
0x5d681b02, 0x2a6f2b94, 0xb40bbe37, 0xc30c8ea1, 0x5a05df1b,
0x2d02ef8d
};
/// <summary>
/// Computes the CRC for given stream of data.
/// </summary>
/// <param name="crc">Seed value (usually set to 0)</param>
/// <param name="data">Data stream.</param>
/// <param name="count">The number of bytes to process.</param>
/// <returns>CRC value</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static uint ComputeCrc(uint crc, Stream data, long count)
{
crc ^= ~((uint)0);
for (var i = 0; i < count; i++)
{
crc = crcTable[(crc ^ data.ReadByte()) & 0xff] ^ (crc >> 8);
}
return ~crc;
}
/// <summary>
/// Computes the CRC for given array of bytes starting at the specified offset till the end of the array.
/// </summary>
/// <param name="crc">Seed value (usually set to 0)</param>
/// <param name="data">Data array.</param>
/// <param name="offset">Offset in the byte array</param>
/// <returns>CRC value</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static uint ComputeCrc(uint crc, byte[] data, long offset)
{
return ComputeCrc(crc, data, offset, data.Length - offset);
}
/// <summary>
/// Computes the CRC for given part of the array of bytes.
/// </summary>
/// <param name="crc">Seed value (usually set to 0)</param>
/// <param name="data">Data array.</param>
/// <param name="offset">Offset in the byte array</param>
/// <param name="count">The number of bytes to process</param>
/// <returns>CRC value</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static uint ComputeCrc(uint crc, byte[] data, long offset, long count)
{
crc ^= ~((uint)0);
for (var i = offset; i < offset + count; i++)
{
crc = crcTable[(crc ^ data[i]) & 0xff] ^ (crc >> 8);
}
return ~crc;
}
}
}
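// Illustrative usage sketch (not part of the original source): computes the CRC of a UTF-8
// encoded string with the table-driven helper above. The zero seed follows the parameter
// documentation of ComputeCrc; the example namespace is hypothetical.
namespace Microsoft.Online.Metrics.Serialization.Examples
{
    using System.Text;

    internal static class CrcExample
    {
        internal static uint ChecksumOf(string text)
        {
            byte[] bytes = Encoding.UTF8.GetBytes(text);
            return Crc.ComputeCrc(0, bytes, 0, bytes.Length);
        }
    }
}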
<file_sep>// --------------------------------------------------------------------------------
// <copyright file="SessionType.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Metrics.Etw
{
/// <summary>
/// Types of ETW sessions that the user can select on the configuration.
/// </summary>
/// <see href="http://msdn.microsoft.com/en-us/library/windows/desktop/aa363784(v=vs.85).aspx"/>
// ReSharper disable UnusedMember.Global
internal enum SessionType
{
/// <summary>
/// ETW events are going to be logged to a file.
/// </summary>
File = 0,
/// <summary>
/// The session will be a real-time one.
/// </summary>
Realtime = 1,
/// <summary>
/// The session will be private to each process and will be logged to
/// the respective files.
/// </summary>
Private = 2,
/// <summary>
/// The session is both a file and real-time session.
/// </summary>
/// <remarks>Defined also as the reverse form to facilitate parsing the enumeration value.</remarks>
FileAndRealtime = 3,
/// <summary>
/// The session is both a file and real-time session.
/// </summary>
/// <remarks>Defined also as the reverse form to facilitate parsing the enumeration value.</remarks>
RealtimeAndFile = 3,
}
}<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="VersionNotSupportedMetricSerializationException.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//
// <author email="selavrin">
// <NAME>
// </author>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Online.Metrics.Serialization
{
using System;
/// <summary>
/// An exception used to report that the version of the deserializer does not support the format of the package it is used to deserialize.
/// </summary>
[Serializable]
public sealed class VersionNotSupportedMetricSerializationException : MetricSerializationException
{
/// <summary>
/// Initializes a new instance of the <see cref="VersionNotSupportedMetricSerializationException"/> class.
/// </summary>
/// <param name="message">A message explaining the cause for exception situation.</param>
public VersionNotSupportedMetricSerializationException(string message)
: base(message, null)
{
}
}
}
<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="ComputedSamplingTypeExpression.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Configuration
{
using System;
using Newtonsoft.Json;
/// <summary>
/// Computed sampling type expression.
/// </summary>
public sealed class ComputedSamplingTypeExpression : IComputedSamplingTypeExpression
{
private string name;
private string expression;
/// <summary>
/// Initializes a new instance of the <see cref="ComputedSamplingTypeExpression"/> class.
/// </summary>
/// <param name="name">The name.</param>
/// <param name="expression">The expression.</param>
/// <param name="unit">The unit.</param>
public ComputedSamplingTypeExpression(string name, string expression, string unit)
{
if (string.IsNullOrWhiteSpace(name))
{
throw new ArgumentNullException(nameof(name));
}
if (string.IsNullOrWhiteSpace(expression))
{
throw new ArgumentNullException(nameof(expression));
}
this.Name = name;
this.Expression = expression;
this.IsBuiltIn = false;
this.Unit = unit;
}
/// <summary>
/// Initializes a new instance of the <see cref="ComputedSamplingTypeExpression"/> class.
/// </summary>
/// <param name="name">The name.</param>
/// <param name="expression">The expression.</param>
/// <param name="isBuiltIn">if set to <c>true</c> [is built in].</param>
/// <param name="unit">The unit.</param>
[JsonConstructor]
internal ComputedSamplingTypeExpression(string name, string expression, bool isBuiltIn, string unit)
{
this.name = name;
this.expression = expression;
this.IsBuiltIn = isBuiltIn;
this.Unit = unit;
}
/// <summary>
/// Gets or sets the name of the expression.
/// </summary>
public string Name
{
get
{
return this.name;
}
set
{
if (string.IsNullOrWhiteSpace(value))
{
throw new ArgumentNullException(nameof(value));
}
this.name = value;
}
}
/// <summary>
/// Gets or sets the expression.
/// </summary>
public string Expression
{
get
{
return this.expression;
}
set
{
if (string.IsNullOrWhiteSpace(value))
{
throw new ArgumentNullException(nameof(value));
}
this.expression = value;
}
}
/// <summary>
/// Gets a value indicating whether this instance is built in.
/// </summary>
public bool IsBuiltIn { get; internal set; }
/// <summary>
/// Gets or sets the unit.
/// </summary>
public string Unit { get; set; }
}
}
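// Illustrative usage sketch (not part of the original source): creates a user-defined computed
// sampling type. The JavaScript expression mirrors the built-in Rate expression and assumes the
// raw Sum sampling type is available; the name, unit, and example namespace are hypothetical.
namespace Microsoft.Cloud.Metrics.Client.Configuration.Examples
{
    internal static class ComputedSamplingTypeExpressionExample
    {
        internal static ComputedSamplingTypeExpression CreateRatePerSecond()
        {
            // The constructor rejects null, empty, or whitespace names and expressions.
            return new ComputedSamplingTypeExpression(
                name: "RatePerSecond",
                expression: "raw.Sum / 60",
                unit: "per second");
        }
    }
}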
<file_sep>// <copyright file="RawPreaggregateFilterQueryArguments.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// ReSharper disable once CheckNamespace
namespace Microsoft.Cloud.Metrics.Client.PreaggregateFiltersManagement
{
using System;
using System.Collections.Generic;
using Newtonsoft.Json;
/// <summary>
/// Represents the arguments for retrieving raw preaggregate filters.
/// </summary>
[JsonObject]
internal sealed class RawPreaggregateFilterQueryArguments
{
/// <summary>
/// Initializes a new instance of the <see cref="RawPreaggregateFilterQueryArguments"/> class.
/// </summary>
/// <param name="monitoringAccount">The monitoring account.</param>
/// <param name="metricNamespace">The metric namespace.</param>
/// <param name="metricName">Name of the metric.</param>
/// <param name="preaggregateDimensionNames">The preaggregate dimension names.</param>
/// <param name="count">The count of filters requested. Use 0 to denote all filters to be returned.</param>
/// <param name="offset">The offset of the requested filters page calculated based on the count of data returned.</param>
[JsonConstructor]
public RawPreaggregateFilterQueryArguments(
string monitoringAccount,
string metricNamespace,
string metricName,
IEnumerable<string> preaggregateDimensionNames,
int count,
int offset)
{
if (string.IsNullOrEmpty(monitoringAccount))
{
throw new ArgumentNullException(nameof(monitoringAccount));
}
if (string.IsNullOrEmpty(metricNamespace))
{
throw new ArgumentNullException(nameof(metricNamespace));
}
if (string.IsNullOrEmpty(metricName))
{
throw new ArgumentNullException(nameof(metricName));
}
if (preaggregateDimensionNames == null)
{
throw new ArgumentNullException(nameof(preaggregateDimensionNames));
}
if (count < 0)
{
throw new ArgumentException($"{nameof(count)} cannot be negative number");
}
if (offset < 0)
{
throw new ArgumentException($"{nameof(offset)} cannot be negative number");
}
this.MonitoringAccount = monitoringAccount;
this.MetricNamespace = metricNamespace;
this.MetricName = metricName;
this.PreaggregateDimensionNames = new SortedSet<string>(preaggregateDimensionNames, StringComparer.OrdinalIgnoreCase);
foreach (string dim in this.PreaggregateDimensionNames)
{
if (string.IsNullOrEmpty(dim))
{
throw new ArgumentException($"{nameof(preaggregateDimensionNames)} cannot have empty of null values");
}
}
this.Count = count;
this.Offset = offset;
}
/// <summary>
/// Gets the monitoring account.
/// </summary>
public string MonitoringAccount { get; }
/// <summary>
/// Gets the metric namespace.
/// </summary>
public string MetricNamespace { get; }
/// <summary>
/// Gets the name of the metric.
/// </summary>
public string MetricName { get; }
/// <summary>
/// Gets the preaggregate dimension names.
/// </summary>
public SortedSet<string> PreaggregateDimensionNames { get; }
/// <summary>
/// Gets the count of filters requested.
/// </summary>
public int Count { get; }
/// <summary>
/// Gets the offset of the requested filters page calculated based on the count of data returned.
/// </summary>
public int Offset { get; }
}
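    /// <summary>
    /// Illustrative usage sketch (not part of the original source): builds query arguments for
    /// the first page of 100 filters. The account, namespace, metric, and dimension names are
    /// hypothetical.
    /// </summary>
    internal static class RawPreaggregateFilterQueryArgumentsExample
    {
        internal static RawPreaggregateFilterQueryArguments CreateFirstPage()
        {
            return new RawPreaggregateFilterQueryArguments(
                monitoringAccount: "ExampleAccount",
                metricNamespace: "ExampleNamespace",
                metricName: "RequestCount",
                preaggregateDimensionNames: new[] { "Datacenter", "Instance" },
                count: 100,
                offset: 0);
        }
    }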
}<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="DiagnosticInfo.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Query
{
/// <summary>
/// A class hosting all diagnostic info for customers to send us to help troubleshooting.
/// </summary>
public sealed class DiagnosticInfo
{
/// <summary>
/// Gets the trace ID.
/// </summary>
public string TraceId { get; internal set; }
/// <summary>
/// Gets the handling server identifier.
/// </summary>
public string HandlingServerId { get; internal set; }
/// <summary>
/// Gets the error message.
/// </summary>
public string ErrorMessage { get; internal set; }
/// <summary>
/// Returns a <see cref="string" /> that represents this instance.
/// </summary>
/// <returns>
/// A <see cref="string" /> that represents this instance.
/// </returns>
public override string ToString()
{
return $"TraceId:{this.TraceId}, HandlingServerId:{this.HandlingServerId}, ErrorMessage:{this.ErrorMessage}.";
}
}
}
<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="IRawMetricConfiguration.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Configuration
{
using System.Collections.Generic;
using Metrics;
/// <summary>
/// Represents a raw metric in MDM.
/// </summary>
public interface IRawMetricConfiguration : IMetricConfiguration
{
/// <summary>
/// Gets or sets the scaling factor.
/// </summary>
float? ScalingFactor { get; set; }
/// <summary>
/// Gets or sets a value indicating whether client publication is enabled.
/// </summary>
bool EnableClientPublication { get; set; }
/// <summary>
/// Gets or sets a value indicating whether client forking is enabled.
/// </summary>
bool EnableClientForking { get; set; }
/// <summary>
/// Gets or sets a value indicating whether the metric will be published to the aggregated ETW channel.
/// </summary>
bool EnableClientEtwPublication { get; set; }
/// <summary>
/// Gets the raw sampling types (Sum, Count, or legacy MetricsClient sampling types).
/// </summary>
IEnumerable<SamplingType> RawSamplingTypes { get; }
/// <summary>
/// Gets the preaggregations of the metric.
/// </summary>
IEnumerable<IPreaggregation> Preaggregations { get; }
/// <summary>
/// Gets the dimensions of the metric.
/// </summary>
IEnumerable<string> Dimensions { get; }
/// <summary>
/// Gets the computed sampling types.
/// </summary>
IEnumerable<IComputedSamplingTypeExpression> ComputedSamplingTypes { get; }
/// <summary>
/// Gets a value indicating whether only the last value seen for a time series is preserved on the client.
/// </summary>
/// <value>
/// <c>true</c> if last sampling mode is used; otherwise, <c>false</c>.
/// </value>
/// <remarks>
/// Client side last sampling mode means that within the collection interval (1m) only the last value set to the metric is kept. This means Sum == Min == Max
/// and Count == 1 for this metric when it is sent to the server.
/// </remarks>
bool EnableClientSideLastSamplingMode { get; }
/// <summary>
/// Determines whether this instance can add preaggregation to the metric configuration.
/// </summary>
/// <param name="preaggregationToAdd">The preaggregation to add.</param>
/// <returns>True if the preaggregation can be added.</returns>
bool CanAddPreaggregation(IPreaggregation preaggregationToAdd);
/// <summary>
/// Adds the preaggregate.
/// </summary>
/// <param name="preaggregate">The preaggregate.</param>
void AddPreaggregation(IPreaggregation preaggregate);
/// <summary>
/// Removes the preaggregate.
/// </summary>
/// <param name="preaggregateName">The name of the preaggregate.</param>
void RemovePreaggregation(string preaggregateName);
/// <summary>
/// Adds the type of the computed sampling.
/// </summary>
/// <param name="computedSamplingType">Type of the computed sampling.</param>
void AddComputedSamplingType(IComputedSamplingTypeExpression computedSamplingType);
/// <summary>
/// Removes the type of the computed sampling.
/// </summary>
/// <param name="computedSamplingTypeName">Name of the computed sampling type.</param>
void RemoveComputedSamplingType(string computedSamplingTypeName);
}
}
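// Illustrative usage sketch (not part of the original source): attaches a user-defined computed
// sampling type to an existing raw metric configuration obtained elsewhere. The expression
// mirrors the built-in Average expression; the names and example namespace are hypothetical.
namespace Microsoft.Cloud.Metrics.Client.Configuration.Examples
{
    internal static class RawMetricConfigurationExample
    {
        internal static void AddAverageLatency(IRawMetricConfiguration configuration)
        {
            var computedType = new ComputedSamplingTypeExpression(
                name: "AverageLatency",
                expression: "raw.Sum / (raw.Count || 1)",
                unit: "ms");

            configuration.AddComputedSamplingType(computedType);
        }
    }
}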
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="IReadOnlyHistogram.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//
// <author email="selavrin">
// <NAME>
// </author>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Online.Metrics.Serialization
{
using System.Collections.Generic;
/// <summary>
/// Read-only interface for objects representing histograms.
/// </summary>
public interface IReadOnlyHistogram
{
/// <summary>
/// Gets the number of samples in the histogram.
/// </summary>
int SamplesCount { get; }
/// <summary>
/// Gets the list of histogram samples: ordered pairs of value-count.
/// </summary>
IEnumerable<KeyValuePair<ulong, uint>> Samples
{
get;
}
/// <summary>
/// Calculates percentile from the histogram.
/// </summary>
/// <param name="percent">Percent value for which to calculate percentile.</param>
/// <returns>Percentile value.</returns>
float GetPercentile(float percent);
}
}
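// Illustrative usage sketch (not part of the original source): reads a high percentile from a
// histogram instance supplied by the caller; only the IReadOnlyHistogram members declared above
// are used, and the example namespace is hypothetical.
namespace Microsoft.Online.Metrics.Serialization.Examples
{
    internal static class HistogramPercentileExample
    {
        internal static float ReadP99(IReadOnlyHistogram histogram)
        {
            // The percent argument follows the GetPercentile documentation above.
            return histogram.GetPercentile(99f);
        }
    }
}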
<file_sep>// -----------------------------------------------------------------------
// <copyright file="EtwPayloadManipulationUtils.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// -----------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Metrics.Etw
{
using System;
using System.Runtime.InteropServices;
using System.Text;
/// <summary>
/// This class contains functions used for metrics data serialization/deserialization to/from the ETW payload.
/// </summary>
internal static unsafe class EtwPayloadManipulationUtils
{
/// <summary>
/// Equivalent of + operator for IntPtr.
/// </summary>
/// <param name="ptr">Pointer value.</param>
/// <param name="offset">Offset to add.</param>
/// <returns>Incremented pointer value.</returns>
public static IntPtr Shift(IntPtr ptr, int offset)
{
return new IntPtr(ptr.ToInt64() + offset);
}
/// <summary>
/// Writes a string in UTF8 format to a buffer starting from the specified pointer.
/// The format is: string size in bytes (ushort), followed by the UTF8 string bytes.
/// </summary>
/// <param name="value">String value.</param>
/// <param name="pointerInPayload">Pointer to a buffer.</param>
/// <param name="bytesBuffer">Buffer to use during string encoding.</param>
/// <returns>A pointer shifted by number of bytes written to a buffer.</returns>
public static IntPtr WriteString(string value, IntPtr pointerInPayload, byte[] bytesBuffer)
{
var bytesCount = Encoding.UTF8.GetBytes(value, 0, value.Length, bytesBuffer, 0);
*((ushort*)pointerInPayload) = (ushort)bytesCount;
pointerInPayload = new IntPtr(pointerInPayload.ToInt64() + sizeof(ushort));
Marshal.Copy(bytesBuffer, 0, pointerInPayload, bytesCount);
return new IntPtr(pointerInPayload.ToInt64() + bytesCount);
}
/// <summary>
/// Reads string value encoded in buffer in UTF8 format.
/// </summary>
/// <param name="pointerInPayload">A pointer to a buffer where string bytes are stored.
/// It will be updated to offset equal to number of bytes occupied by the string.</param>
/// <returns>String values read.</returns>
public static string ReadString(ref IntPtr pointerInPayload)
{
ushort strLen = *((ushort*)pointerInPayload);
pointerInPayload = new IntPtr(pointerInPayload.ToInt64() + sizeof(ushort));
var stringOnPayload = new string((sbyte*)pointerInPayload, 0, strLen, Encoding.UTF8);
pointerInPayload = new IntPtr(pointerInPayload.ToInt64() + strLen);
return stringOnPayload;
}
}
}
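// Illustrative round-trip sketch (not part of the original source): writes a string into an
// unmanaged buffer with the helpers above and reads it back. The 1 KB buffer sizes, the sample
// flow, and the example namespace are hypothetical.
namespace Microsoft.Cloud.Metrics.Client.Metrics.Etw.Examples
{
    using System;
    using System.Runtime.InteropServices;

    internal static class EtwPayloadManipulationUtilsExample
    {
        internal static string RoundTrip(string value)
        {
            IntPtr buffer = Marshal.AllocHGlobal(1024);
            try
            {
                // Scratch buffer used by WriteString for UTF8 encoding.
                var scratch = new byte[1024];
                EtwPayloadManipulationUtils.WriteString(value, buffer, scratch);

                // ReadString advances the pointer, so read from a copy of the start address.
                IntPtr readPointer = buffer;
                return EtwPayloadManipulationUtils.ReadString(ref readPointer);
            }
            finally
            {
                Marshal.FreeHGlobal(buffer);
            }
        }
    }
}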
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="LocalAggregatedMetric.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Metrics
{
using System;
using System.Collections.Generic;
using Etw;
using Online.Metrics.Serialization;
/// <summary>
/// The class representing the locally aggregated metric in the ETW stream.
/// </summary>
internal sealed class LocalAggregatedMetric : ILocalAggregatedMetric
{
/// <summary>
/// Represents the character used to separate items within a list stored in a single ETW field.
/// </summary>
private static readonly char[] EtwListSeparatorChar = { '^' };
/// <summary>
/// The dimension name and dimension value pairs.
/// </summary>
private readonly Dictionary<string, string> dimensions = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
/// <summary>
/// Gets the Monitoring Account to which this metric is reported.
/// </summary>
public string MonitoringAccount { get; private set; }
/// <summary>
/// Gets the metric namespace.
/// </summary>
public string MetricNamespace { get; private set; }
/// <summary>
/// Gets the metric name.
/// </summary>
public string MetricName { get; private set; }
/// <summary>
/// Gets the time in UTC when metric was reported.
/// </summary>
public DateTime MetricTimeUtc { get; private set; }
/// <summary>
/// Gets the dimension name-value dictionary.
/// </summary>
/// <remarks>The dimension names are case insensitive.</remarks>
public IReadOnlyDictionary<string, string> Dimensions
{
get
{
return this.dimensions;
}
}
/// <summary>
/// Gets the scaling factor applied to metric values.
/// </summary>
public float ScalingFactor { get; private set; }
/// <summary>
/// Gets the number of samples for which this metric is reported.
/// </summary>
public uint Count { get; private set; }
/// <summary>
/// Gets the scaled sum of sample values reported for this metric.
/// </summary>
public float ScaledSum { get; private set; }
/// <summary>
/// Gets the scaled minimum value of samples reported for this metric.
/// </summary>
public float ScaledMin { get; private set; }
/// <summary>
/// Gets the scaled maximum value of samples reported for this metric.
/// </summary>
public float ScaledMax { get; private set; }
/// <summary>
/// Gets the sum of sample values reported for this metric.
/// </summary>
public ulong Sum { get; private set; }
/// <summary>
/// Gets the minimum value of samples reported for this metric.
/// </summary>
public ulong Min { get; private set; }
/// <summary>
/// Gets the maximum value of samples reported for this metric.
/// </summary>
public ulong Max { get; private set; }
/// <summary>
/// Converts content of the ETW event published by ME to a <see cref="LocalAggregatedMetric"/>
/// </summary>
/// <param name="etwMetricData">Object containing information about metric data sample.</param>
/// <returns>A MetricData object representing a locally aggregated metric.</returns>
internal static unsafe LocalAggregatedMetric ConvertToMetricData(NativeMethods.EventRecord* etwMetricData)
{
var metricData = new LocalAggregatedMetric();
IntPtr pointerInPayload = etwMetricData->UserData;
metricData.MonitoringAccount = EtwPayloadManipulationUtils.ReadString(ref pointerInPayload);
metricData.MetricNamespace = EtwPayloadManipulationUtils.ReadString(ref pointerInPayload);
metricData.MetricName = EtwPayloadManipulationUtils.ReadString(ref pointerInPayload);
long timestamp = *((long*)pointerInPayload);
metricData.MetricTimeUtc = DateTime.FromFileTimeUtc(timestamp);
pointerInPayload = EtwPayloadManipulationUtils.Shift(pointerInPayload, sizeof(long));
// Read the dimension name and values and split them out.
var dimensionNames = EtwPayloadManipulationUtils.ReadString(ref pointerInPayload);
var dimensionValues = EtwPayloadManipulationUtils.ReadString(ref pointerInPayload);
if (!string.IsNullOrWhiteSpace(dimensionNames) && !string.IsNullOrWhiteSpace(dimensionValues))
{
var splitDimensionNames = dimensionNames.Split(EtwListSeparatorChar, StringSplitOptions.None);
var splitDimensionValues = dimensionValues.Split(EtwListSeparatorChar, StringSplitOptions.None);
// Expected that both lengths be the same since they are written this way.
for (var x = 0; x < splitDimensionNames.Length && x < splitDimensionValues.Length; ++x)
{
metricData.dimensions[splitDimensionNames[x]] = splitDimensionValues[x];
}
}
var scalingFactor = *((float*)pointerInPayload);
pointerInPayload = EtwPayloadManipulationUtils.Shift(pointerInPayload, sizeof(float));
metricData.ScalingFactor = scalingFactor;
var samplingTypes = (SamplingTypes)(*((int*)pointerInPayload));
pointerInPayload = EtwPayloadManipulationUtils.Shift(pointerInPayload, sizeof(int));
if ((samplingTypes & SamplingTypes.Min) != 0)
{
metricData.Min = *((ulong*)pointerInPayload);
metricData.ScaledMin = metricData.Min / scalingFactor;
}
pointerInPayload = EtwPayloadManipulationUtils.Shift(pointerInPayload, sizeof(ulong));
if ((samplingTypes & SamplingTypes.Max) != 0)
{
metricData.Max = *((ulong*)pointerInPayload);
metricData.ScaledMax = metricData.Max / scalingFactor;
}
pointerInPayload = EtwPayloadManipulationUtils.Shift(pointerInPayload, sizeof(ulong));
if ((samplingTypes & SamplingTypes.Sum) != 0)
{
metricData.Sum = *((ulong*)pointerInPayload);
metricData.ScaledSum = metricData.Sum / scalingFactor;
}
pointerInPayload = EtwPayloadManipulationUtils.Shift(pointerInPayload, sizeof(ulong));
pointerInPayload = EtwPayloadManipulationUtils.Shift(pointerInPayload, sizeof(float));
if ((samplingTypes & SamplingTypes.Count) != 0)
{
metricData.Count = *((uint*)pointerInPayload);
}
pointerInPayload = EtwPayloadManipulationUtils.Shift(pointerInPayload, sizeof(uint));
return metricData;
}
}
}<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="DimensionFilter.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Metrics
{
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Linq;
using Newtonsoft.Json;
/// <summary>
/// A filter to include only specific dimension values, if any.
/// </summary>
public sealed class DimensionFilter
{
/// <summary>
/// The dimension name.
/// </summary>
private readonly string dimensionName;
/// <summary>
/// The dimension values.
/// </summary>
private readonly string[] dimensionValues;
/// <summary>
/// Flag to indicate if this is exclude filter.
/// </summary>
[SuppressMessage("StyleCop.CSharp.NamingRules", "SA1305:FieldNamesMustNotUseHungarianNotation", Justification = "Reviewed. Suppression is OK here.")]
private readonly bool isExcludeFilter;
/// <summary>
/// Initializes a new instance of the <see cref="DimensionFilter" /> class.
/// </summary>
/// <param name="dimensionName">The dimension name.</param>
/// <param name="dimensionValues">The dimension values.</param>
/// <param name="isExcludeFilter">If set to <c>true</c> [is exclude filter].</param>
/// <remarks>
/// By default, this is an include filter.
/// </remarks>
[JsonConstructor]
public DimensionFilter(string dimensionName, IEnumerable<string> dimensionValues, bool isExcludeFilter)
{
if (string.IsNullOrWhiteSpace(dimensionName))
{
throw new ArgumentException("dimensionName is null or empty");
}
this.dimensionName = dimensionName;
this.dimensionValues = dimensionValues != null ? dimensionValues.ToArray() : null;
this.isExcludeFilter = isExcludeFilter;
}
/// <summary>
/// Gets the dimension name.
/// </summary>
public string DimensionName
{
get
{
return this.dimensionName;
}
}
/// <summary>
/// Gets the dimension values.
/// </summary>
public IReadOnlyList<string> DimensionValues
{
get
{
return this.dimensionValues;
}
}
/// <summary>
/// Gets a value indicating whether this instance is an exclude dimension filter.
/// </summary>
public bool IsExcludeFilter
{
get
{
return this.isExcludeFilter;
}
}
/// <summary>
/// Performs an implicit conversion from <see cref="string"/> to <see cref="DimensionFilter"/>.
/// </summary>
/// <param name="dimensionName">The dimension name.</param>
/// <returns>
/// The result of the conversion.
/// </returns>
public static implicit operator DimensionFilter(string dimensionName)
{
return CreateIncludeFilter(dimensionName);
}
/// <summary>
/// Creates an include dimension filter.
/// </summary>
/// <param name="dimensionName">The dimension name.</param>
/// <param name="dimensionValues">The dimension values.</param>
/// <returns>An include dimension filter.</returns>
public static DimensionFilter CreateIncludeFilter(string dimensionName, params string[] dimensionValues)
{
return CreateIncludeFilter(dimensionName, dimensionValues.AsEnumerable());
}
/// <summary>
/// Creates an include dimension filter.
/// </summary>
/// <param name="dimensionName">The dimension name.</param>
/// <param name="dimensionValues">The dimension values.</param>
/// <returns>An include dimension filter.</returns>
public static DimensionFilter CreateIncludeFilter(string dimensionName, IEnumerable<string> dimensionValues)
{
return new DimensionFilter(dimensionName, dimensionValues, isExcludeFilter: false);
}
/// <summary>
/// Creates an exclude dimension filter.
/// </summary>
/// <param name="dimensionName">The dimension name.</param>
/// <param name="dimensionValues">The dimension values.</param>
/// <returns>An exclude dimension filter.</returns>
public static DimensionFilter CreateExcludeFilter(string dimensionName, params string[] dimensionValues)
{
return CreateExcludeFilter(dimensionName, dimensionValues.AsEnumerable());
}
/// <summary>
/// Creates an exclude dimension filter.
/// </summary>
/// <param name="dimensionName">The dimension name.</param>
/// <param name="dimensionValues">The dimension values.</param>
/// <returns>An exclude dimension filter.</returns>
public static DimensionFilter CreateExcludeFilter(string dimensionName, IEnumerable<string> dimensionValues)
{
return new DimensionFilter(dimensionName, dimensionValues, isExcludeFilter: true);
}
}
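    /// <summary>
    /// Illustrative usage sketch (not part of the original source): builds one include filter,
    /// one exclude filter, and one unrestricted filter with the members above. The dimension
    /// names and values are hypothetical.
    /// </summary>
    internal static class DimensionFilterExample
    {
        internal static IReadOnlyList<DimensionFilter> CreateSampleFilters()
        {
            return new List<DimensionFilter>
            {
                // Keep only two values of the "Datacenter" dimension.
                DimensionFilter.CreateIncludeFilter("Datacenter", "DC1", "DC2"),

                // Drop canary traffic while keeping every other "Environment" value.
                DimensionFilter.CreateExcludeFilter("Environment", "Canary"),

                // The implicit string conversion yields an include filter with no value restriction.
                "Instance",
            };
        }
    }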
}<file_sep>using k8s;
using System.Collections.Generic;
namespace AzSignalR.Monitor.JobRegistry
{
/// <summary>
/// A workaround for the connection leak issue in ServiceClient (base of Kubernetes).
///
/// see: https://github.com/Azure/azure-sdk-for-net/issues/5977
/// </summary>
class FixedKubernetes : Kubernetes
{
public FixedKubernetes(KubernetesClientConfiguration config) : base(config) { }
protected override void Dispose(bool disposing)
{
HttpClientHandler?.Dispose();
// base.Dispose will set the HttpClientHandler to null.
base.Dispose(disposing);
}
}
public class KubernetesClientCache
{
private readonly IDictionary<string, KubernetesClientWrapper> _instances = new Dictionary<string, KubernetesClientWrapper>();
public Kubernetes Get(string cluster, string kubeConfig)
{
if (_instances.TryGetValue(cluster, out var wrapper))
{
if (wrapper.KubeConfig == kubeConfig)
{
return wrapper.Client;
}
}
lock (_instances)
{
if (_instances.TryGetValue(cluster, out wrapper))
{
if (wrapper.KubeConfig == kubeConfig)
{
return wrapper.Client;
}
}
try
{
using (var kubeConfigStream = Utils.GenerateStreamFromString(kubeConfig))
{
var config = KubernetesClientConfiguration.BuildConfigFromConfigFile(kubeConfigStream);
var client = new FixedKubernetes(config);
_instances[cluster] = new KubernetesClientWrapper(kubeConfig, client); // indexer assignment replaces a stale entry; Add would throw if the key already exists
return client;
}
}
finally
{
if (wrapper != null)
{
// this won't clean all the resources, that's why we need the Cache impl here
wrapper.Client?.Dispose();
}
}
}
}
class KubernetesClientWrapper
{
public string KubeConfig { get; }
public Kubernetes Client { get; }
public KubernetesClientWrapper(string config, Kubernetes client)
{
KubeConfig = config;
Client = client;
}
}
}
}
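// Illustrative usage sketch (not part of the original source): resolves a cached Kubernetes
// client for a cluster, reusing the existing connection unless the kubeconfig content changed.
// The cluster name, kubeconfig path, and example namespace are hypothetical.
namespace AzSignalR.Monitor.JobRegistry.Examples
{
    using System.IO;
    using k8s;

    internal static class KubernetesClientCacheExample
    {
        private static readonly KubernetesClientCache Cache = new KubernetesClientCache();

        internal static Kubernetes GetClient(string clusterName, string kubeConfigPath)
        {
            string kubeConfig = File.ReadAllText(kubeConfigPath);
            return Cache.Get(clusterName, kubeConfig);
        }
    }
}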
<file_sep>// -----------------------------------------------------------------------
// <copyright file="IFrontEndMetricBuilder.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// -----------------------------------------------------------------------
namespace Microsoft.Online.Metrics.Serialization
{
using System;
using System.Collections.Generic;
using System.IO;
/// <summary>
/// The interface used by deserializer to abstract the way how deserialized objects are created.
/// It is up to the user of the deserializer to implement the type to which data is deserialized, decide whether to use object pools, etc.
/// </summary>
/// <typeparam name="TMetadata">Type of metadata to use for creation of metric data objects.</typeparam>
public interface IFrontEndMetricBuilder<TMetadata>
where TMetadata : IMetricMetadata
{
/// <summary>
/// Sets the packet serialization version.
/// </summary>
/// <param name="serializationVersion">Serialization version.</param>
void SetSerializationVersion(ushort serializationVersion);
/// <summary>
/// Gets interned string for given value.
/// </summary>
/// <param name="value">String to be interned.</param>
/// <returns>Interned string.</returns>
string GetString(string value);
/// <summary>
/// Creates the custom object representing metric metadata. Note that the same metadata object can be shared
/// by many metric instances.
/// </summary>
/// <param name="metricNamespace">Namespace of the metric.</param>
/// <param name="metricName">Name of the metric.</param>
/// <param name="dimensionNames">Names of the metric dimensions.</param>
/// <returns>The metric metadata representing the given parameters.</returns>
TMetadata CreateMetadata(string metricNamespace, string metricName, IReadOnlyList<string> dimensionNames);
/// <summary>
/// Start adding metric data with populating common values.
/// </summary>
/// <param name="metadata">Metric metadata.</param>
/// <param name="dimensionValues">List of dimension values.</param>
/// <param name="timeUtc">Time of the metric.</param>
/// <param name="samplingTypes">Available sampling types of the metric.</param>
/// <param name="count">Count value of the metric (how many times it was reported).</param>
/// <param name="sum">Sum value of the metric.</param>
/// <param name="min">Minimum value of the metric.</param>
/// <param name="max">Maximum value of the metric.</param>
/// <param name="sumOfSquareDiffFromMean">Sum of squares differences from mean of the metric.</param>
void BeginMetricCreation(
TMetadata metadata,
IReadOnlyList<string> dimensionValues,
DateTime timeUtc,
SamplingTypes samplingTypes,
uint count,
MetricValueV2 sum,
MetricValueV2 min,
MetricValueV2 max,
double sumOfSquareDiffFromMean);
/// <summary>
/// Assigns a histogram to the metric being built.
/// </summary>
/// <param name="value">Histogram of the metric.</param>
void AssignHistogram(IReadOnlyList<KeyValuePair<ulong, uint>> value);
/// <summary>
/// Assigns a t-digest to the metric being built.
/// </summary>
/// <param name="reader">Reader containing the data.</param>
/// <param name="length">Length of data to read.</param>
void AssignTDigest(BinaryReader reader, int length);
/// <summary>
/// Assigns HyperLogLog sketches to the metric being built.
/// </summary>
/// <param name="reader">Stream containing the data.</param>
/// <param name="length">Length of data to read.</param>
void AssignHyperLogLogSketch(BinaryReader reader, int length);
/// <summary>
/// Signals that deserializer has completed metric deserialization.
/// </summary>
void EndMetricCreation();
}
}
<file_sep>//---------------------------------------------------------------------------------
// <copyright file="CollectorConfiguration.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//---------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Metrics.Etw
{
using System;
using System.Collections.Generic;
using System.IO;
/// <summary>
/// Type to read the configuration of a single collector.
/// </summary>
internal sealed class CollectorConfiguration
{
/// <summary>
/// Initializes a new instance of the <see cref="CollectorConfiguration" /> class.
/// </summary>
/// <param name="etwSessionsPrefix">The etw session name prefix.</param>
public CollectorConfiguration(string etwSessionsPrefix)
{
var cpuNum = Environment.ProcessorCount;
// ETW buffer properties
this.FlushTimerSec = 0;
this.MinBufferCount = 2 * cpuNum;
this.MaxBufferCount = 2 * this.MinBufferCount;
this.BufferSizeKB = 256;
this.ClockType = ClockType.System;
this.SessionType = SessionType.Realtime;
this.DeprecatedCollector = null;
this.MaxFileSizeMB = 100;
this.MaxFileTimeSpan = TimeSpan.FromMinutes(5);
this.MaxFileCount = 1440;
this.OriginalName = "Collector";
this.Name = GetNormalizedSessionName(this.OriginalName, this.SessionType, etwSessionsPrefix);
// Providers - by default empty
this.Providers = new Dictionary<Guid, ProviderConfiguration>();
}
/// <summary>
/// Gets the name of the ETW session to be created for this collector. This is the name found in the
/// configuration file, normalized to make it possible to identify ETW sessions created by the MonitoringAgent.
/// </summary>
public string Name { get; private set; }
/// <summary>
/// Gets the name of the collector as defined by the user in the configuration file.
/// </summary>
public string OriginalName { get; private set; }
/// <summary>
/// Gets the name of an ETW session that was used by previous versions of the dependent service that
/// should be stopped when the service starts to use a session provided via the MonitoringAgent.
/// </summary>
/// <remarks>
/// This is important when the dependent service fails to stop the ETW realtime session that was used
/// before having the service using the session created by the MonitoringAgent. In this case there will
/// be no listeners for the legacy session and eventually both sessions (the legacy and the one provided
/// by the MonitoringAgent) will start to drop events.
/// </remarks>
public string DeprecatedCollector { get; set; }
/// <summary>
/// Gets how often, in seconds, the trace buffers are forcibly flushed.
/// </summary>
/// <see href="http://msdn.microsoft.com/en-us/library/windows/desktop/aa363784(v=vs.85).aspx"/>
public int FlushTimerSec { get; set; }
/// <summary>
/// Gets the minimum number of buffers to be allocated to the ETW session.
/// </summary>
/// <see href="http://msdn.microsoft.com/en-us/library/windows/desktop/aa363784(v=vs.85).aspx"/>
public int MinBufferCount { get; set; }
/// <summary>
/// Gets the maximum number of buffers to be allocated to the ETW session.
/// </summary>
/// <see href="http://msdn.microsoft.com/en-us/library/windows/desktop/aa363784(v=vs.85).aspx"/>
public int MaxBufferCount { get; set; }
/// <summary>
/// Gets the buffer size, in KB, to be used in the session.
/// </summary>
/// <see href="http://msdn.microsoft.com/en-us/library/windows/desktop/aa363784(v=vs.85).aspx"/>
public int BufferSizeKB { get; set; }
/// <summary>
/// Gets the clock type to be used in the ETW session. Check ClockType to see how this should
/// be written in the configuration file.
/// </summary>
/// <see href="http://msdn.microsoft.com/en-us/library/windows/desktop/aa364160(v=vs.85).aspx"/>
public ClockType ClockType { get; set; }
/// <summary>
/// Gets the logging mode to be used in the ETW session.
/// </summary>
/// <see href="http://msdn.microsoft.com/en-us/library/windows/desktop/aa363784(v=vs.85).aspx"/>
public SessionType SessionType { get; set; }
/// <summary>
/// Gets the maximum size, in megabytes, that an ETL file should be allowed to grow.
/// </summary>
public int MaxFileSizeMB { get; set; }
/// <summary>
/// Gets the maximum time that each ETL file should cover.
/// </summary>
public TimeSpan MaxFileTimeSpan { get; set; }
/// <summary>
/// Gets the maximum number of ETL files for this collector that should be allowed
/// to exist on disk.
/// </summary>
public int MaxFileCount { get; set; }
/// <summary>
/// Gets the list of provider configurations for the collector.
/// </summary>
public Dictionary<Guid, ProviderConfiguration> Providers { get; set; }
/// <summary>
/// Gets a normalized name from the session name originally specified in the configuration. The normalized
/// name will be the ETW session name.
/// </summary>
/// <param name="originalName">The session name to be normalized.</param>
/// <param name="sessionType">The type of the session.</param>
/// <param name="etwSessionsPrefix">The etw session name prefix.</param>
/// <returns>
/// The <see cref="string" /> with the normalized session name.
/// </returns>
/// <exception cref="System.IO.InvalidDataException">The specified session type is not recognized: + sessionType</exception>
public static string GetNormalizedSessionName(string originalName, SessionType sessionType, string etwSessionsPrefix)
{
if (originalName.Equals("NT Kernel Logger", StringComparison.OrdinalIgnoreCase))
{
return originalName;
}
string sessionTypeAbbr;
switch (sessionType)
{
case SessionType.File:
sessionTypeAbbr = "file-";
break;
case SessionType.FileAndRealtime:
sessionTypeAbbr = "file+live-";
break;
case SessionType.Private:
sessionTypeAbbr = "private-";
break;
case SessionType.Realtime:
sessionTypeAbbr = "live-";
break;
default:
throw new InvalidDataException(
"The specified session type is not recognized: " + sessionType);
}
return etwSessionsPrefix + sessionTypeAbbr + originalName;
}
}
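
    /// <summary>
    /// Minimal usage sketch (not part of the original source) illustrating how collector session
    /// names are normalized by <see cref="CollectorConfiguration.GetNormalizedSessionName"/>.
    /// The "MA-" prefix used below is a hypothetical value; real prefixes are supplied by the hosting agent.
    /// </summary>
    internal static class CollectorConfigurationExample
    {
        internal static void Show()
        {
            // A realtime session named "Collector" normalizes to "MA-live-Collector".
            string normalized = CollectorConfiguration.GetNormalizedSessionName(
                "Collector", SessionType.Realtime, "MA-");

            // "NT Kernel Logger" is special-cased and returned unchanged.
            string kernel = CollectorConfiguration.GetNormalizedSessionName(
                "NT Kernel Logger", SessionType.File, "MA-");

            Console.WriteLine(normalized + " / " + kernel);
        }
    }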
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="MetricConfigurationManager.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Configuration
{
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Cloud.Metrics.Client.Logging;
using Microsoft.Cloud.Metrics.Client.Metrics;
using Microsoft.Cloud.Metrics.Client.Utility;
using Monitors;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
/// <summary>
/// This class manages get and save operations on metric configurations.
/// </summary>
public sealed class MetricConfigurationManager : IMetricConfigurationManager
{
private static readonly object LogId = Logger.CreateCustomLogId("MetricConfigurationManager");
private readonly ConnectionInfo connectionInfo;
private readonly HttpClient httpClient;
private readonly string metricConfigurationUrlPrefix;
private readonly string metricUrlPrefix;
private readonly JsonSerializerSettings serializerSettings;
private readonly MonitorConfigurationManager monitorConfigManager;
/// <summary>
/// Initializes a new instance of the <see cref="MetricConfigurationManager"/> class.
/// </summary>
/// <param name="connectionInfo">The connection information for the MDM endpoint being used.</param>
public MetricConfigurationManager(ConnectionInfo connectionInfo)
: this(connectionInfo, HttpClientHelper.CreateHttpClientWithAuthInfo(connectionInfo))
{
}
/// <summary>
/// Initializes a new instance of the <see cref="MetricConfigurationManager"/> class.
/// </summary>
/// <param name="connectionInfo">The connection information for the MDM endpoint being used.</param>
/// <param name="client">HttpClient for the connection to the MDM endpoint being used.</param>
internal MetricConfigurationManager(ConnectionInfo connectionInfo, HttpClient client)
{
if (connectionInfo == null)
{
throw new ArgumentNullException(nameof(connectionInfo));
}
this.connectionInfo = connectionInfo;
this.metricConfigurationUrlPrefix = this.connectionInfo.GetAuthRelativeUrl("v1/config/metricConfiguration/");
this.metricUrlPrefix = this.connectionInfo.GetAuthRelativeUrl(MetricsServerRelativeUris.ConfigRelativeUrl);
this.monitorConfigManager = new MonitorConfigurationManager(this.connectionInfo);
this.httpClient = client;
var migrations = new[]
{
new ClientAssemblyMigration(
"Microsoft.Online.Metrics.Common",
"Microsoft.Online.Metrics.Common.EventConfiguration.ComputedSamplingTypeExpressionImpl",
typeof(ComputedSamplingTypeExpression)),
new ClientAssemblyMigration(
"Microsoft.Online.Metrics.Common",
"Microsoft.Online.Metrics.Common.EventConfiguration.PreaggregationImpl",
typeof(Preaggregation)),
new ClientAssemblyMigration(
"Microsoft.Online.Metrics.Common",
"Microsoft.Online.Metrics.Common.EventConfiguration.MinMaxConfigurationImpl",
typeof(MinMaxConfiguration)),
new ClientAssemblyMigration(
"Microsoft.Online.Metrics.Common",
"Microsoft.Online.Metrics.Common.EventConfiguration.PercentileConfigurationImpl",
typeof(PercentileConfiguration)),
new ClientAssemblyMigration(
"Microsoft.Online.Metrics.Common",
"Microsoft.Online.Metrics.Common.EventConfiguration.RollupConfigurationImpl",
typeof(RollupConfiguration)),
new ClientAssemblyMigration(
"Microsoft.Online.Metrics.Common",
"Microsoft.Online.Metrics.Common.EventConfiguration.PublicationConfigurationImpl",
typeof(PublicationConfiguration)),
new ClientAssemblyMigration(
"Microsoft.Online.Metrics.Common",
"Microsoft.Online.Metrics.Common.EventConfiguration.DistinctCountConfigurationImpl",
typeof(DistinctCountConfiguration)),
new ClientAssemblyMigration(
"Microsoft.Online.Metrics.Common",
"Microsoft.Online.Metrics.Common.EventConfiguration.RawMetricConfigurationImpl",
typeof(RawMetricConfiguration)),
new ClientAssemblyMigration(
"Microsoft.Online.Metrics.Common",
"Microsoft.Online.Metrics.Common.EventConfiguration.CompositeMetricConfigurationImpl",
typeof(CompositeMetricConfiguration)),
new ClientAssemblyMigration(
"Metrics.Server",
"Microsoft.Online.Metrics.Server.Utilities.ConfigurationUpdateResult",
typeof(ConfigurationUpdateResult)),
new ClientAssemblyMigration(
"Microsoft.Online.Metrics.Common",
"Microsoft.Online.Metrics.Common.EventConfiguration.FilteringConfigurationImpl",
typeof(FilteringConfiguration)),
};
this.serializerSettings = new JsonSerializerSettings
{
TypeNameHandling = TypeNameHandling.Auto,
Binder = new ClientAssemblyMigrationSerializationBinder(migrations)
};
this.MaxParallelRunningTasks = 20;
}
/// <summary>
/// Gets or sets the maximum parallel running tasks.
        /// </summary>
public int MaxParallelRunningTasks { get; set; }
/// <summary>
/// Get the metric specified by the account, namespace and metric name.
/// </summary>
/// <param name="monitoringAccount">The monitoring account.</param>
/// <param name="metricNamespace">The metric namespace.</param>
/// <param name="metricName">The metric name.</param>
/// <returns>The metric.</returns>
public async Task<IMetricConfiguration> GetAsync(IMonitoringAccount monitoringAccount, string metricNamespace, string metricName)
{
if (monitoringAccount == null)
{
throw new ArgumentNullException(nameof(monitoringAccount));
}
if (string.IsNullOrWhiteSpace(metricNamespace))
{
throw new ArgumentNullException(nameof(metricNamespace));
}
if (string.IsNullOrWhiteSpace(metricName))
{
throw new ArgumentNullException(nameof(metricName));
}
var path = $"{this.metricConfigurationUrlPrefix}/monitoringAccount/{monitoringAccount.Name}/metricNamespace/{SpecialCharsHelper.EscapeTwice(metricNamespace)}/metric/{SpecialCharsHelper.EscapeTwice(metricName)}";
var uriBuilder = new UriBuilder(this.connectionInfo.GetEndpoint(monitoringAccount.Name))
{
Path = path
};
try
{
var response = await HttpClientHelper.GetResponse(
uriBuilder.Uri,
HttpMethod.Get,
this.httpClient,
monitoringAccount.Name,
this.metricConfigurationUrlPrefix).ConfigureAwait(false);
return JsonConvert.DeserializeObject<IMetricConfiguration[]>(response.Item1, this.serializerSettings)[0];
}
catch (MetricsClientException mce)
{
if (mce.ResponseStatusCode.HasValue && mce.ResponseStatusCode.Value == HttpStatusCode.NotFound)
{
throw new MetricNotFoundException($"Metric [{monitoringAccount.Name}][{metricNamespace}][{metricName}] not found. TraceId: [{mce.TraceId}]", mce);
}
throw;
}
}
/// <summary>
        /// Gets the metric configurations under the specified account and namespace.
/// </summary>
/// <param name="monitoringAccount">The monitoring account.</param>
/// <param name="metricNamespace">The metric namespace.</param>
/// <param name="returnEmptyConfig">Determine if empty, unmodified configurations should be returned.</param>
/// <returns>The metrics that match the criteria.</returns>
public async Task<IReadOnlyList<IMetricConfiguration>> GetMultipleAsync(IMonitoringAccount monitoringAccount, string metricNamespace, bool returnEmptyConfig = false)
{
if (monitoringAccount == null)
{
throw new ArgumentNullException(nameof(monitoringAccount));
}
if (string.IsNullOrWhiteSpace(metricNamespace))
{
throw new ArgumentNullException(nameof(metricNamespace));
}
var path = $"{this.metricConfigurationUrlPrefix}/monitoringAccount/{monitoringAccount.Name}/metricNamespace/{SpecialCharsHelper.EscapeTwice(metricNamespace)}";
var query = $"includeEmptyConfig={returnEmptyConfig}";
var uriBuilder = new UriBuilder(this.connectionInfo.GetEndpoint(monitoringAccount.Name))
{
Path = path,
Query = query
};
try
{
var response = await HttpClientHelper.GetResponse(
uriBuilder.Uri,
HttpMethod.Get,
this.httpClient,
monitoringAccount.Name,
this.metricConfigurationUrlPrefix).ConfigureAwait(false);
return JsonConvert.DeserializeObject<IMetricConfiguration[]>(response.Item1, this.serializerSettings);
}
catch (MetricsClientException mce)
{
if (mce.ResponseStatusCode.HasValue && mce.ResponseStatusCode.Value == HttpStatusCode.NotFound)
{
throw new MetricNotFoundException($"Metrics under [{monitoringAccount.Name}][{metricNamespace}] not found. TraceId: [{mce.TraceId}]", mce);
}
throw;
}
}
/// <summary>
        /// Gets the metric configurations under the specified monitoring account.
/// </summary>
/// <param name="monitoringAccount">The monitoring account.</param>
/// <param name="returnEmptyConfig">Determine if empty, unmodified configurations should be returned.</param>
/// <returns>The metrics that match the criteria.</returns>
public async Task<IReadOnlyList<IMetricConfiguration>> GetMultipleAsync(IMonitoringAccount monitoringAccount, bool returnEmptyConfig = false)
{
if (monitoringAccount == null)
{
throw new ArgumentNullException(nameof(monitoringAccount));
}
var path = $"{this.metricConfigurationUrlPrefix}/monitoringAccount/{monitoringAccount.Name}";
var query = $"includeEmptyConfig={returnEmptyConfig}";
var uriBuilder = new UriBuilder(this.connectionInfo.GetEndpoint(monitoringAccount.Name))
{
Path = path,
Query = query
};
try
{
var response = await HttpClientHelper.GetResponse(
uriBuilder.Uri,
HttpMethod.Get,
this.httpClient,
monitoringAccount.Name,
this.metricConfigurationUrlPrefix).ConfigureAwait(false);
return JsonConvert.DeserializeObject<IMetricConfiguration[]>(response.Item1, this.serializerSettings);
}
catch (MetricsClientException mce)
{
if (mce.ResponseStatusCode.HasValue && mce.ResponseStatusCode.Value == HttpStatusCode.NotFound)
{
throw new MetricNotFoundException($"Metrics under [{monitoringAccount.Name}] not found. TraceId: [{mce.TraceId}]", mce);
}
throw;
}
}
/// <summary>
/// Save the metric configuration provided.
/// </summary>
/// <param name="monitoringAccount">The monitoring account configuration.</param>
/// <param name="metricConfiguration">The metric to be saved.</param>
/// <param name="skipVersionCheck">Flag indicating whether or not the version flag should be honored.</param>
/// <returns>A task the caller can wait on.</returns>
public async Task SaveAsync(IMonitoringAccount monitoringAccount, IMetricConfiguration metricConfiguration, bool skipVersionCheck = false)
{
if (monitoringAccount == null)
{
throw new ArgumentNullException(nameof(monitoringAccount));
}
if (metricConfiguration == null)
{
throw new ArgumentNullException(nameof(metricConfiguration));
}
var path = $"{this.metricConfigurationUrlPrefix}/monitoringAccount/{monitoringAccount.Name}/metricNamespace/{SpecialCharsHelper.EscapeTwice(metricConfiguration.MetricNamespace)}/metric/{SpecialCharsHelper.EscapeTwice(metricConfiguration.Name)}/skipVersionCheck/{skipVersionCheck}";
var uriBuilder = new UriBuilder(this.connectionInfo.GetEndpoint(monitoringAccount.Name))
{
Path = path,
Query = "apiVersion=1"
};
var serializedMetric = JsonConvert.SerializeObject(new[] { metricConfiguration }, Formatting.Indented, this.serializerSettings);
try
{
await HttpClientHelper.GetResponse(
uriBuilder.Uri,
HttpMethod.Post,
this.httpClient,
monitoringAccount.Name,
this.metricConfigurationUrlPrefix,
httpContent: serializedMetric).ConfigureAwait(false);
}
catch (MetricsClientException mce)
{
if (mce.ResponseStatusCode == HttpStatusCode.BadRequest)
{
throw new ConfigurationValidationException(
$"Metric [{monitoringAccount.Name}][{metricConfiguration.MetricNamespace}][{metricConfiguration.Name}] could not be saved because validation failed. Response: {mce.Message}",
ValidationType.ServerSide,
mce);
}
throw;
}
}
/// <summary>
/// Deletes the metric configuration by metric name.
/// </summary>
/// <param name="monitoringAccount">The monitoring account.</param>
/// <param name="metricNamespace">The metric namespace.</param>
/// <param name="metricName">Name of the metric.</param>
/// <returns>A task the caller can wait on.</returns>
public async Task DeleteAsync(IMonitoringAccount monitoringAccount, string metricNamespace, string metricName)
{
if (monitoringAccount == null)
{
throw new ArgumentNullException(nameof(monitoringAccount));
}
if (string.IsNullOrWhiteSpace(metricNamespace))
{
throw new ArgumentNullException(nameof(metricNamespace));
}
if (string.IsNullOrWhiteSpace(metricName))
{
throw new ArgumentNullException(nameof(metricName));
}
var operation = $"{this.connectionInfo.GetAuthRelativeUrl(string.Empty)}v1/config/metrics";
var path =
$"{operation}/monitoringAccount/{monitoringAccount.Name}/metricNamespace/{SpecialCharsHelper.EscapeTwice(metricNamespace)}/metric/{SpecialCharsHelper.EscapeTwice(metricName)}";
var uriBuilder = new UriBuilder(this.connectionInfo.GetEndpoint(monitoringAccount.Name))
{
Path = path
};
await HttpClientHelper.GetResponse(
uriBuilder.Uri,
HttpMethod.Delete,
this.httpClient,
monitoringAccount.Name,
operation).ConfigureAwait(false);
}
/// <inheritdoc />
public async Task<IReadOnlyList<IConfigurationUpdateResult>> SyncAllAsync(
IMonitoringAccount monitoringAccount,
bool skipVersionCheck = false)
{
if (monitoringAccount == null)
{
throw new ArgumentNullException(nameof(monitoringAccount));
}
var metricReader = new MetricReader(this.connectionInfo);
var namespaces =
await metricReader.GetNamespacesAsync(monitoringAccount.Name).ConfigureAwait(false);
IReadOnlyList<IConfigurationUpdateResult> results = null;
            var namespacesWithTimeout = new List<string>();
foreach (var ns in namespaces)
{
try
{
var namespaceResults =
await this.SyncAllAsync(monitoringAccount, ns, skipVersionCheck)
.ConfigureAwait(false);
if (namespaceResults.Any(updateResult => !updateResult.Success))
{
return namespaceResults;
}
// For QOS namespaces or other internal namespaces, there is no configuration to
                    // replicate and thus the namespace results are an empty list.
if (namespaceResults.Count > 0)
{
results = namespaceResults;
}
}
catch (MetricsClientException mce)
{
if (!mce.ResponseStatusCode.HasValue || mce.ResponseStatusCode == HttpStatusCode.GatewayTimeout)
{
                        namespacesWithTimeout.Add(ns);
}
else
{
throw;
}
}
}
            if (namespacesWithTimeout.Count > 0)
            {
                var msg =
                    $"Failed to sync all configurations for namespaces: {string.Join(", ", namespacesWithTimeout)}."
+ " Please try again for these namespaces.";
throw new MetricsClientException(msg);
}
return results;
}
/// <inheritdoc />
public async Task<IReadOnlyList<IConfigurationUpdateResult>> SyncAllAsync(
IMonitoringAccount monitoringAccount,
string metricNamespace,
bool skipVersionCheck = false)
{
if (monitoringAccount == null)
{
throw new ArgumentNullException(nameof(monitoringAccount));
}
if (string.IsNullOrEmpty(metricNamespace))
{
throw new ArgumentNullException(nameof(metricNamespace));
}
var operation = $"{this.metricUrlPrefix}/replicateConfigurationToMirrorAccounts";
var path =
$"{operation}/monitoringAccount/{monitoringAccount.Name}/metricNamespace/{SpecialCharsHelper.EscapeTwice(metricNamespace)}/skipVersionCheck/{skipVersionCheck}";
var uriBuilder = new UriBuilder(this.connectionInfo.GetGlobalEndpoint())
{
Path = path
};
try
{
var response = await HttpClientHelper.GetResponse(
uriBuilder.Uri,
HttpMethod.Post,
this.httpClient,
monitoringAccount.Name,
operation).ConfigureAwait(false);
return JsonConvert.DeserializeObject<ConfigurationUpdateResult[]>(
response.Item1,
this.serializerSettings);
}
catch (MetricsClientException mce)
{
if (mce.ResponseStatusCode == HttpStatusCode.BadRequest)
{
var exMsg = $"Unable to sync all configuration for metric namespace : {metricNamespace} as either "
+ $"no mirror accounts found for monitoring account : {monitoringAccount.Name} or user doesn't "
+ $"have permission to update configurations in mirror accounts. Response : {mce.Message}";
throw new ConfigurationValidationException(exMsg, ValidationType.ServerSide, mce);
}
throw;
}
}
/// <inheritdoc />
public async Task<IReadOnlyList<ConfigurationUpdateResultList>> SyncAllAsyncV2(
IMonitoringAccount monitoringAccount,
bool skipVersionCheck = false)
{
if (monitoringAccount == null)
{
throw new ArgumentNullException(nameof(monitoringAccount));
}
var metricReader = new MetricReader(this.connectionInfo);
var namespaces =
await metricReader.GetNamespacesAsync(monitoringAccount.Name).ConfigureAwait(false);
List<ConfigurationUpdateResultList> results = new List<ConfigurationUpdateResultList>();
foreach (var ns in namespaces)
{
var namespaceResults = await this.SyncAllAsyncV2(
monitoringAccount,
ns,
skipVersionCheck).ConfigureAwait(false);
// For QOS namespaces or other internal namespaces, there is no configuration to
                // replicate and thus the namespace results are an empty list.
if (namespaceResults.Count > 0)
{
results.AddRange(namespaceResults);
}
}
return results;
}
/// <inheritdoc />
public async Task<IReadOnlyList<ConfigurationUpdateResultList>> SyncAllAsyncV2(
IMonitoringAccount monitoringAccount,
string metricNamespace,
bool skipVersionCheck = false)
{
if (monitoringAccount == null)
{
throw new ArgumentNullException(nameof(monitoringAccount));
}
if (string.IsNullOrEmpty(metricNamespace))
{
throw new ArgumentNullException(nameof(metricNamespace));
}
var metricReader = new MetricReader(this.connectionInfo);
var metricNames = await metricReader.GetMetricNamesAsync(
monitoringAccount.Name,
metricNamespace).ConfigureAwait(false);
var taskList = new List<Task<ConfigurationUpdateResultList>>(this.MaxParallelRunningTasks);
List<ConfigurationUpdateResultList> results = new List<ConfigurationUpdateResultList>();
foreach (var metricName in metricNames)
{
if (taskList.Count == this.MaxParallelRunningTasks)
{
await this.WaitAllForSyncAllAsyncV2(taskList, results).ConfigureAwait(false);
taskList.Clear();
}
taskList.Add(this.SyncConfigurationAsync(monitoringAccount, metricNamespace, metricName, skipVersionCheck));
}
if (taskList.Count > 0)
{
await this.WaitAllForSyncAllAsyncV2(taskList, results).ConfigureAwait(false);
taskList.Clear();
}
return results;
}
/// <inheritdoc />
public async Task<ConfigurationUpdateResultList> SyncConfigurationAsync(
IMonitoringAccount monitoringAccount,
string metricNamespace,
string metricName,
bool skipVersionCheck = false)
{
if (monitoringAccount == null)
{
throw new ArgumentNullException(nameof(monitoringAccount));
}
if (string.IsNullOrWhiteSpace(metricNamespace))
{
throw new ArgumentNullException(nameof(metricNamespace));
}
if (string.IsNullOrWhiteSpace(metricName))
{
throw new ArgumentNullException(nameof(metricName));
}
var operation = $"{this.metricUrlPrefix}/replicateConfigurationToMirrorAccounts";
var path =
$"{operation}/monitoringAccount/{monitoringAccount.Name}/metricNamespace/{SpecialCharsHelper.EscapeTwice(metricNamespace)}/skipVersionCheck/{skipVersionCheck}";
var uriBuilder = new UriBuilder(this.connectionInfo.GetGlobalEndpoint())
{
Path = path,
Query = $"metricName={metricName}"
};
var result = new ConfigurationUpdateResultList
{
MonitoringAccount = monitoringAccount.Name,
MetricNamespace = metricNamespace,
MetricName = metricName
};
try
{
var response = await HttpClientHelper.GetResponse(
uriBuilder.Uri,
HttpMethod.Post,
this.httpClient,
monitoringAccount.Name,
operation).ConfigureAwait(false);
result.ConfigurationUpdateResults =
JsonConvert.DeserializeObject<ConfigurationUpdateResult[]>(
response.Item1,
this.serializerSettings);
foreach (var updateResult in result.ConfigurationUpdateResults)
{
if (!updateResult.Success)
{
result.Success = false;
result.ExceptionMessage = updateResult.Message;
return result;
}
}
// Sync monitor configurations
var monitorConfigurationUpdatedResult = await this.monitorConfigManager.SyncConfigurationAsync(
monitoringAccount,
metricNamespace,
metricName,
skipVersionCheck)
.ConfigureAwait(false);
if (monitorConfigurationUpdatedResult.ConfigurationUpdateResults == null || !monitorConfigurationUpdatedResult.ConfigurationUpdateResults.Any())
{
result.Success = false;
return result;
}
foreach (var updateResult in monitorConfigurationUpdatedResult.ConfigurationUpdateResults)
{
if (!updateResult.Success)
{
result.Success = false;
result.ExceptionMessage = updateResult.Message;
return result;
}
}
result.Success = true;
return result;
}
catch (MetricsClientException mce)
{
result.Success = false;
if (mce.ResponseStatusCode == HttpStatusCode.Unauthorized || mce.ResponseStatusCode == HttpStatusCode.Forbidden)
{
var exMsg =
$"Unable to sync configuration for monitoringAccount:{monitoringAccount.Name}, metricNamespace:"
+ $"{metricNamespace}, metricName:{metricName} as user"
+ $"doesn't have permission to update configurations in mirror accounts. Response:{mce.Message}";
throw new ConfigurationValidationException(exMsg, ValidationType.ServerSide, mce);
}
else
{
result.ExceptionMessage = mce.Message;
}
return result;
}
}
/// <inheritdoc />
public async Task<OperationStatus> DownloadMetricConfigurationAsync(
string destinationFolder,
IMonitoringAccount monitoringAccount,
string metricNamespace = null,
string metricName = null,
Regex metricNameRegex = null,
bool foldersOnNamespacesLevel = false,
bool downloadDefaultMetricConfig = false,
int maxFileNameProducedLength = 256)
{
const string logTag = "DownloadMetricConfigurationAsync";
Logger.Log(LoggerLevel.CustomerFacingInfo, LogId, logTag, $"Target folder to save configurations is {destinationFolder}.");
if (!FileOperationHelper.CreateFolderIfNotExists(destinationFolder))
{
Logger.Log(LoggerLevel.Error, LogId, logTag, $"Cannot create folder {destinationFolder} on local disk.");
return OperationStatus.FolderCreationError;
}
var reader = new MetricReader(this.connectionInfo);
IReadOnlyList<string> namespaces = string.IsNullOrWhiteSpace(metricNamespace)
? await reader.GetNamespacesAsync(monitoringAccount.Name).ConfigureAwait(false)
: new[] { metricNamespace };
if (namespaces == null || namespaces.Count == 0)
{
Logger.Log(LoggerLevel.Warning, LogId, logTag, $"No namespace is found under {monitoringAccount.Name}!");
return OperationStatus.ResourceNotFound;
}
var operationResult = OperationStatus.CompleteSuccess;
var getMetricsTaskList = new List<Task<IMetricConfiguration>>();
// Metrics counters for better statistics
int totalMetricsCount = 0;
int retrievedMetricsCount = 0;
int skippedMetricsCount = 0;
foreach (var currentNamespace in namespaces)
{
var currentFolder = destinationFolder;
if (foldersOnNamespacesLevel)
{
var subFolder = FileNamePathHelper.ConvertPathToValidFolderName(currentNamespace);
currentFolder += Path.DirectorySeparatorChar + subFolder;
if (!FileOperationHelper.CreateFolderIfNotExists(currentFolder))
{
Logger.Log(LoggerLevel.Error, LogId, logTag, $"Cannot create folder {currentFolder} on local disk.");
return OperationStatus.FolderCreationError;
}
}
var metricNames = string.IsNullOrWhiteSpace(metricName)
? await reader.GetMetricNamesAsync(monitoringAccount.Name, currentNamespace).ConfigureAwait(false)
: new[] { metricName };
if (metricNames == null || metricNames.Count == 0)
{
Logger.Log(LoggerLevel.CustomerFacingInfo, LogId, logTag, $"No metric name found under {currentNamespace}.");
continue;
}
using (var throttler = new SemaphoreSlim(this.MaxParallelRunningTasks))
{
foreach (var currentMetric in metricNames)
{
if (metricNameRegex != null && !metricNameRegex.IsMatch(currentMetric))
{
continue;
}
await throttler.WaitAsync().ConfigureAwait(false);
getMetricsTaskList.Add(Task.Run(async () =>
{
try
{
Logger.Log(LoggerLevel.CustomerFacingInfo, LogId, logTag, $"Getting metric {currentMetric} in namespace {currentNamespace} ...");
return await this.GetAsync(monitoringAccount, currentNamespace, currentMetric)
.ConfigureAwait(false);
}
finally
{
throttler.Release();
}
}));
totalMetricsCount++;
}
try
{
await Task.WhenAll(getMetricsTaskList).ConfigureAwait(false);
}
catch
{
var getExceptionsTaskList = getMetricsTaskList.Where(t => t.Exception != null);
foreach (var exTask in getExceptionsTaskList)
{
Logger.Log(LoggerLevel.Error, LogId, logTag, $"GetMetricsTasks Exception thrown : {exTask.Exception.Flatten()}");
}
operationResult = OperationStatus.ResourceGetError;
}
}
foreach (var successTask in getMetricsTaskList.Where(t => t.Exception == null))
{
// Calculate count of successfully retrieved metrics for processing
retrievedMetricsCount++;
var downloadedMetric = successTask.Result;
var processResult = this.ProcessRetrievedMetrics(
downloadedMetric,
monitoringAccount.Name,
downloadDefaultMetricConfig,
maxFileNameProducedLength,
currentFolder);
// Update current non-error result if any error results returned
if ((operationResult == OperationStatus.CompleteSuccess || operationResult == OperationStatus.ResourceSkipped)
&& processResult != OperationStatus.CompleteSuccess)
{
operationResult = processResult;
}
if (processResult == OperationStatus.ResourceSkipped)
{
skippedMetricsCount++;
}
}
Logger.Log(LoggerLevel.CustomerFacingInfo, LogId, logTag, $"Metrics under namespace {currentNamespace} are processed.");
getMetricsTaskList.Clear();
}
Logger.Log(
LoggerLevel.CustomerFacingInfo,
LogId,
logTag,
$"Detail statistics : For account {monitoringAccount.Name} , totally {totalMetricsCount} metrics are requested, {retrievedMetricsCount} metrics configuration are retrieved, {retrievedMetricsCount - skippedMetricsCount} metrics are saved as files.");
if (totalMetricsCount == 0 || retrievedMetricsCount == skippedMetricsCount)
{
return OperationStatus.ResourceNotFound;
}
return operationResult;
}
/// <inheritdoc />
public async Task<OperationStatus> ReplaceAccountNameInMetricConfigurationFilesAsync(
string sourceFolder,
IMonitoringAccount monitoringAccount,
string replaceAccountNameWith,
Regex metricNameRegex = null)
{
return await this.ModifyMetricConfigurationFilesAsync(
sourceFolder,
monitoringAccount,
metricNameRegex,
replaceAccountNameWith,
replaceNamespaceWith: null).ConfigureAwait(false);
}
/// <inheritdoc />
public async Task<OperationStatus> ReplaceNamespaceInMetricConfigurationFilesAsync(
string sourceFolder,
IMonitoringAccount monitoringAccount,
string replaceNamespaceWith,
Regex metricNameRegex = null)
{
return await this.ModifyMetricConfigurationFilesAsync(
sourceFolder,
monitoringAccount,
metricNameRegex,
replaceAccountNameWith: null,
replaceNamespaceWith: replaceNamespaceWith).ConfigureAwait(false);
}
/// <inheritdoc />
public async Task<OperationStatus> UploadMetricConfigurationAsync(
string sourceFolder,
IMonitoringAccount monitoringAccount,
bool force = false)
{
const string logTag = "UploadMetricConfigurationAsync";
const string jsonFileExtension = "*.json";
Logger.Log(LoggerLevel.CustomerFacingInfo, LogId, logTag, $"Folder to read is {sourceFolder}");
var operationResult = OperationStatus.CompleteSuccess;
var uploadTaskList = new List<Task>();
var totalFilesCount = 0;
var failedFilesCount = 0;
if (!force)
{
Logger.Log(LoggerLevel.CustomerFacingInfo, LogId, logTag, "Version check is enabled, Server will increment uploaded metric configuration version by 1.");
}
using (var throttler = new SemaphoreSlim(this.MaxParallelRunningTasks))
{
foreach (var currentConfigFile in Directory.EnumerateFiles(sourceFolder, jsonFileExtension))
{
IMetricConfiguration metricConfigFromFile;
try
{
metricConfigFromFile = this.ReadFileAsMetricConfiguration(currentConfigFile);
if (!ConfigFileValidator.ValidateMetricConfigFromFile(metricConfigFromFile))
{
Logger.Log(LoggerLevel.Error, LogId, logTag, $"Metric Config file {currentConfigFile} failed validation.");
operationResult = OperationStatus.FileCorrupted;
continue;
}
}
catch (Exception)
{
operationResult = OperationStatus.FileCorrupted;
continue;
}
totalFilesCount++;
await throttler.WaitAsync().ConfigureAwait(false);
uploadTaskList.Add(Task.Run(async () =>
{
try
{
Logger.Log(LoggerLevel.CustomerFacingInfo, LogId, logTag, $"Uploading metric configuration from config file {currentConfigFile}");
await this.SaveAsync(monitoringAccount, metricConfigFromFile, force);
}
finally
{
throttler.Release();
}
}));
}
try
{
await Task.WhenAll(uploadTaskList).ConfigureAwait(false);
}
catch
{
var exceptionTaskList = uploadTaskList.Where(t => t.Exception != null);
foreach (var exTask in exceptionTaskList)
{
Logger.Log(LoggerLevel.Error, LogId, logTag, $"Upload Task throw Exception : {exTask.Exception.Flatten()}");
failedFilesCount++;
}
operationResult = OperationStatus.ResourcePostError;
}
}
Logger.Log(
LoggerLevel.CustomerFacingInfo,
LogId,
logTag,
$"Detail statistics : Total {totalFilesCount} config files are correctly parsed and pending for upload. {totalFilesCount - failedFilesCount} configs are uploaded successfully.");
return operationResult;
}
/// <inheritdoc />
public async Task<OperationStatus> ApplyTemplateMetricConfigurationAsync(
string templateFilePath,
IMonitoringAccount monitoringAccount,
string metricNamespace = null,
string metricName = null,
Regex metricNameRegex = null,
bool force = false,
bool whatIf = false)
{
const string logTag = "ApplyTemplateMetricConfigurationAsync";
Logger.Log(LoggerLevel.CustomerFacingInfo, LogId, logTag, $"The template file path is {templateFilePath}.");
IMetricConfiguration metricConfigTemplate;
try
{
metricConfigTemplate = this.ReadFileAsMetricConfiguration(templateFilePath);
if (!ConfigFileValidator.ValidateMetricConfigFromFile(metricConfigTemplate))
{
Logger.Log(LoggerLevel.Error, LogId, logTag, $"Template file {templateFilePath} failed validation.");
return OperationStatus.FileCorrupted;
}
}
catch (Exception ex)
{
Logger.Log(LoggerLevel.Error, LogId, logTag, $"Template file {templateFilePath} is corrupted for parsing. Exception: {ex}");
return OperationStatus.FileCorrupted;
}
if (whatIf)
{
var templateString = JsonConvert.SerializeObject(metricConfigTemplate, Formatting.Indented, this.serializerSettings);
Logger.Log(LoggerLevel.CustomerFacingInfo, LogId, logTag, $"The template configuration to apply is :\n{templateString}");
return OperationStatus.CompleteSuccess;
}
var reader = new MetricReader(this.connectionInfo);
IReadOnlyList<string> namespaces = string.IsNullOrWhiteSpace(metricNamespace)
? await reader.GetNamespacesAsync(monitoringAccount.Name).ConfigureAwait(false)
: new[] { metricNamespace };
if (namespaces == null || namespaces.Count == 0)
{
Logger.Log(LoggerLevel.Warning, LogId, logTag, $"No namespace is found under {monitoringAccount.Name}!");
return OperationStatus.ResourceNotFound;
}
var operationResult = OperationStatus.CompleteSuccess;
var uploadTaskList = new List<Task>();
var totalMetricsCount = 0;
var skippedMetricsCount = 0;
var failedMetricsCount = 0;
var metricsToApplyCount = 0;
using (var throttler = new SemaphoreSlim(this.MaxParallelRunningTasks))
{
foreach (var currentNamespace in namespaces)
{
var metricNames = string.IsNullOrWhiteSpace(metricName)
? await reader.GetMetricNamesAsync(monitoringAccount.Name, currentNamespace).ConfigureAwait(false)
: new[] { metricName };
if (metricNames == null || metricNames.Count == 0)
{
Logger.Log(LoggerLevel.CustomerFacingInfo, LogId, logTag, $"No metric name found under {currentNamespace}.");
continue;
}
foreach (var currentMetric in metricNames)
{
if (metricNameRegex != null && !metricNameRegex.IsMatch(currentMetric))
{
continue;
}
totalMetricsCount++;
if (!force)
{
try
{
                                var metricOnServer = await this.GetAsync(monitoringAccount, currentNamespace, currentMetric).ConfigureAwait(false);
bool isDefaultMetric = this.IsDefaultMetric(metricOnServer);
// Apply template will not overwrite existing configuration on server unless force option is enabled.
if (!isDefaultMetric)
{
Logger.Log(LoggerLevel.CustomerFacingInfo, LogId, logTag, "Existing metric configuration is detected on server. Skip applying template.");
skippedMetricsCount++;
continue;
}
}
catch (Exception ex)
{
Logger.Log(LoggerLevel.CustomerFacingInfo, LogId, logTag, $"Exception getting metric configuration from server. Skip applying template. Exceptions: {ex}");
continue;
}
}
var metricConfigToUpload = this.ApplyTemplateConfigWithDifferentMetric(metricConfigTemplate, currentNamespace, currentMetric);
await throttler.WaitAsync().ConfigureAwait(false);
uploadTaskList.Add(Task.Run(async () =>
{
try
{
Logger.Log(LoggerLevel.CustomerFacingInfo, LogId, logTag, $"Uploading template metric configuration to Metric [{monitoringAccount.Name}][{currentNamespace}][{currentMetric}]");
await this.SaveAsync(monitoringAccount, metricConfigToUpload, force);
}
finally
{
throttler.Release();
}
}));
}
}
try
{
metricsToApplyCount = uploadTaskList.Count;
await Task.WhenAll(uploadTaskList).ConfigureAwait(false);
}
catch
{
var exceptionTaskList = uploadTaskList.Where(t => t.Exception != null);
foreach (var exTask in exceptionTaskList)
{
Logger.Log(LoggerLevel.Error, LogId, logTag, $"Apply Template upload task throw Exception : {exTask.Exception.Flatten()}");
failedMetricsCount++;
}
operationResult = OperationStatus.ResourcePostError;
}
}
Logger.Log(
LoggerLevel.CustomerFacingInfo,
LogId,
logTag,
$"Detail statistics : {metricsToApplyCount - failedMetricsCount} metrics are successfully applied.\nTotal {totalMetricsCount} matching metrics are requested for applying template. {skippedMetricsCount} metrics are skipped due to already existence on server.");
if (totalMetricsCount == 0 || skippedMetricsCount == totalMetricsCount)
{
return OperationStatus.ResourceNotFound;
}
return operationResult;
}
/// <inheritdoc />
public async Task<IReadOnlyList<ConfigurationUpdateResultList>> SyncAllMetricsAsync(
IMonitoringAccount monitoringAccount,
string metricNamespace = null,
bool skipVersionCheck = false)
{
if (monitoringAccount == null)
{
throw new ArgumentNullException(nameof(monitoringAccount));
}
var metricReader = new MetricReader(this.connectionInfo);
IReadOnlyList<string> namespaces;
if (string.IsNullOrWhiteSpace(metricNamespace))
{
namespaces =
await metricReader.GetNamespacesAsync(monitoringAccount.Name).ConfigureAwait(false);
}
else
{
namespaces = new List<string> { metricNamespace };
}
var taskList = new List<Task<ConfigurationUpdateResultList>>(this.MaxParallelRunningTasks);
var results = new List<ConfigurationUpdateResultList>();
foreach (var ns in namespaces)
{
IReadOnlyList<string> metricNames =
await metricReader.GetMetricNamesAsync(
monitoringAccount.Name,
ns).ConfigureAwait(false);
foreach (var metricName in metricNames)
{
taskList.Add(
this.SyncMetricConfigurationAsync(
monitoringAccount,
ns,
metricName,
skipVersionCheck));
if (taskList.Count == this.MaxParallelRunningTasks)
{
await this.WaitAllForSyncAllAsyncV2(taskList, results).ConfigureAwait(false);
taskList.Clear();
}
}
}
if (taskList.Count > 0)
{
await this.WaitAllForSyncAllAsyncV2(taskList, results).ConfigureAwait(false);
taskList.Clear();
}
return results;
}
/// <inheritdoc />
public async Task<ConfigurationUpdateResultList> SyncMetricConfigurationAsync(
IMonitoringAccount monitoringAccount,
string metricNamespace,
string metricName,
bool skipVersionCheck = false)
{
if (monitoringAccount == null)
{
throw new ArgumentNullException(nameof(monitoringAccount));
}
if (string.IsNullOrWhiteSpace(metricNamespace))
{
throw new ArgumentNullException(nameof(metricNamespace));
}
if (string.IsNullOrWhiteSpace(metricName))
{
throw new ArgumentNullException(nameof(metricName));
}
var operation = $"{this.metricUrlPrefix}/replicateMetricConfigurationToMirrorAccounts";
var path =
$"{operation}/monitoringAccount/{monitoringAccount.Name}/metricNamespace/{SpecialCharsHelper.EscapeTwice(metricNamespace)}";
var uriBuilder = new UriBuilder(this.connectionInfo.GetGlobalEndpoint())
{
Path = path,
Query = $"metricName={metricName}&skipVersionCheck={skipVersionCheck}"
};
var result = new ConfigurationUpdateResultList
{
MonitoringAccount = monitoringAccount.Name,
MetricNamespace = metricNamespace,
MetricName = metricName
};
try
{
var response = await HttpClientHelper.GetResponse(
uriBuilder.Uri,
HttpMethod.Post,
this.httpClient,
monitoringAccount.Name,
operation).ConfigureAwait(false);
result.ConfigurationUpdateResults =
JsonConvert.DeserializeObject<ConfigurationUpdateResult[]>(
response.Item1,
this.serializerSettings);
foreach (var updateResult in result.ConfigurationUpdateResults)
{
if (!updateResult.Success)
{
result.Success = false;
result.ExceptionMessage = updateResult.Message;
return result;
}
}
result.Success = true;
return result;
}
catch (MetricsClientException mce)
{
result.Success = false;
if (mce.ResponseStatusCode == HttpStatusCode.Unauthorized)
{
var exMsg =
$"Unable to sync configuration for monitoringAccount:{monitoringAccount.Name}, metricNamespace:"
+ $"{metricNamespace}, metricName:{metricName} as user"
+ $"doesn't have permission to update configurations in mirror accounts. Response:{mce.Message}";
throw new ConfigurationValidationException(exMsg, ValidationType.ServerSide, mce);
}
else
{
result.ExceptionMessage = mce.Message;
}
return result;
}
}
/// <summary>
        /// Modifies local metric configuration JSON files by replacing the account name and/or namespace.
/// </summary>
/// <param name="sourceFolder">folder in which config files locate.</param>
/// <param name="monitoringAccount">Monitoring account</param>
/// <param name="metricNameRegex">metric name regex.</param>
/// <param name="replaceAccountNameWith">account name to replace original account with in local config files.</param>
/// <param name="replaceNamespaceWith">namespace to replace original namespace with in local config files.</param>
/// <returns> OperationStatus.</returns>
private async Task<OperationStatus> ModifyMetricConfigurationFilesAsync(
string sourceFolder,
IMonitoringAccount monitoringAccount,
Regex metricNameRegex = null,
string replaceAccountNameWith = null,
string replaceNamespaceWith = null)
{
const string logTag = "ModifyMetricConfigurationFiles";
const string jsonFileExtension = "*.json";
Logger.Log(LoggerLevel.CustomerFacingInfo, LogId, logTag, $"Folder to read is {sourceFolder}");
var operationResult = OperationStatus.CompleteSuccess;
var modifyConfigTaskList = new List<Task>();
var totalFilesCount = 0;
var failedFilesCount = 0;
foreach (var currentConfigFile in Directory.EnumerateFiles(sourceFolder, jsonFileExtension))
{
IMetricConfiguration metricConfigFromFile;
try
{
metricConfigFromFile = this.ReadFileAsMetricConfiguration(currentConfigFile);
if (metricNameRegex != null && !metricNameRegex.IsMatch(metricConfigFromFile.Name))
{
continue;
}
}
catch (Exception)
{
operationResult = OperationStatus.FileCorrupted;
continue;
}
totalFilesCount++;
modifyConfigTaskList.Add(
this.ModifyMetricConfigurationAsync(
currentConfigFile,
metricConfigFromFile,
monitoringAccount,
replaceAccountNameWith,
replaceNamespaceWith));
}
try
{
await Task.WhenAll(modifyConfigTaskList).ConfigureAwait(false);
}
catch
{
var modifyExceptionsTaskList = modifyConfigTaskList.Where(t => t.Exception != null);
foreach (var exTask in modifyExceptionsTaskList)
{
Logger.Log(LoggerLevel.Error, LogId, logTag, $"modifyConfigTasks Exceptions thrown : {exTask.Exception.Flatten()}");
failedFilesCount++;
}
operationResult = OperationStatus.FileSaveError;
}
Logger.Log(LoggerLevel.CustomerFacingInfo, LogId, logTag, $"Detail statistics : Total {totalFilesCount} config files are correctly read and processed. {totalFilesCount - failedFilesCount} config files are modified successfully.");
return operationResult;
}
/// <summary>
/// Read a metric config file and deserialize as IMetricConfiguration.
/// </summary>
/// <param name="filePath">metric config file to read</param>
        /// <returns>The deserialized <see cref="IMetricConfiguration"/> instance.</returns>
private IMetricConfiguration ReadFileAsMetricConfiguration(string filePath)
{
const string logTag = "ReadFileAsMetricConfiguration";
string content = File.ReadAllText(filePath);
IMetricConfiguration metricConfigurationFromFile;
try
{
metricConfigurationFromFile =
JsonConvert.DeserializeObject<RawMetricConfiguration>(content, this.serializerSettings);
Logger.Log(LoggerLevel.CustomerFacingInfo, LogId, logTag, $"Processing file {filePath}, this is raw metric.");
}
catch
{
try
{
metricConfigurationFromFile =
JsonConvert.DeserializeObject<CompositeMetricConfiguration>(content, this.serializerSettings);
Logger.Log(LoggerLevel.CustomerFacingInfo, LogId, logTag, $"Processing file {filePath}, this is composite metric.");
}
catch (JsonSerializationException jex)
{
Logger.Log(LoggerLevel.Error, LogId, logTag, $"Cannot deserialize Json file {filePath}. Exceptions : {jex}");
throw;
}
}
return metricConfigurationFromFile;
}
/// <summary>
        /// Processes successfully retrieved metric configuration objects.
/// </summary>
/// <param name="downloadedMetric">metric configuration</param>
/// <param name="accountName">account name</param>
/// <param name="downloadDefaultMetricConfig">whether save default retrieved metric configuration</param>
/// <param name="maxFileNameProducedLength">max size of file name length</param>
/// <param name="curFolder">folder to store this metric configuration</param>
/// <returns>OperationStatus</returns>
private OperationStatus ProcessRetrievedMetrics(
IMetricConfiguration downloadedMetric,
string accountName,
bool downloadDefaultMetricConfig,
int maxFileNameProducedLength,
string curFolder)
{
const string logTag = "ProcessRetrievedMetrics";
Logger.Log(LoggerLevel.CustomerFacingInfo, LogId, logTag, $"Processing retrieved metric {downloadedMetric.Name} configuration.");
if (!downloadDefaultMetricConfig)
{
if (downloadedMetric.LastUpdatedTime == default(DateTime))
{
Logger.Log(
LoggerLevel.CustomerFacingInfo,
LogId,
logTag,
$"Skipping default metric config for metric {downloadedMetric.Name} in namespace {downloadedMetric.MetricNamespace}");
return OperationStatus.ResourceSkipped;
}
}
var fileName = FileNamePathHelper.ConstructValidFileName(
accountName,
downloadedMetric.MetricNamespace,
downloadedMetric.Name,
string.Empty,
FileNamePathHelper.JsonFileExtension,
maxFileNameProducedLength);
Logger.Log(LoggerLevel.CustomerFacingInfo, LogId, logTag, $"Saving metric config file {fileName} ...");
try
{
FileOperationHelper.SaveContentToFile(
Path.Combine(curFolder, fileName),
JsonConvert.SerializeObject(downloadedMetric, Formatting.Indented, this.serializerSettings));
Logger.Log(LoggerLevel.CustomerFacingInfo, LogId, logTag, $"Saved metric config file {fileName}.");
return OperationStatus.CompleteSuccess;
}
catch (Exception ex)
{
Logger.Log(LoggerLevel.Error, LogId, logTag, $"Failed writing file {fileName}. Exception: {ex}");
return OperationStatus.FileSaveError;
}
}
/// <summary>
/// Async modify IMetricConfiguration and save to file
/// </summary>
/// <param name="configFile">configuration file to save modified config</param>
/// <param name="metricConfigFromFile">original IMetricConfiguration read from file</param>
/// <param name="monitoringAccount">MonitoringAccount</param>
/// <param name="replaceAccountNameWith">account name to replace original account with in local config files</param>
/// <param name="replaceNamespaceWith">namespace to replace original namespace with in local config files</param>
/// <returns>return a Task</returns>
private async Task ModifyMetricConfigurationAsync(
string configFile,
IMetricConfiguration metricConfigFromFile,
IMonitoringAccount monitoringAccount,
string replaceAccountNameWith,
string replaceNamespaceWith)
{
const string logTag = "ModifyMetricConfigurationAsync";
IMetricConfiguration newMetricConfig;
if (metricConfigFromFile is RawMetricConfiguration)
{
newMetricConfig = this.CopyAndReplaceRawMetricConfig(
(RawMetricConfiguration)metricConfigFromFile,
replaceAccountNameWith,
replaceNamespaceWith);
}
else
{
newMetricConfig = this.CopyAndReplaceCompositeMetricConfig(
(CompositeMetricConfiguration)metricConfigFromFile,
monitoringAccount,
replaceAccountNameWith,
replaceNamespaceWith);
}
try
{
Logger.Log(LoggerLevel.CustomerFacingInfo, LogId, logTag, $"Writing file {configFile}...");
await FileOperationHelper.SaveContentToFileAsync(
configFile,
JsonConvert.SerializeObject(newMetricConfig, Formatting.Indented, this.serializerSettings)).ConfigureAwait(false);
}
catch
{
Logger.Log(LoggerLevel.Error, LogId, logTag, $"Exceptions in modifying local file {configFile}.");
throw;
}
}
/// <summary>
/// Copy original RawMetricConfiguration and do modification.
/// </summary>
/// <param name="rawMetricConfigFromFile">original RawMetricConfiguration from file</param>
/// <param name="replaceAccountNameWith">account name to replace original account with in local config files</param>
/// <param name="replaceNamespaceWith">namespace to replace original namespace with in local config files</param>
/// <returns>modified RawMetricConfiguration</returns>
private RawMetricConfiguration CopyAndReplaceRawMetricConfig(
RawMetricConfiguration rawMetricConfigFromFile,
string replaceAccountNameWith,
string replaceNamespaceWith)
{
if (!string.IsNullOrWhiteSpace(replaceAccountNameWith))
{
Logger.Log(LoggerLevel.CustomerFacingInfo, LogId, "CopyAndReplaceRawMetricConfig", $"Metric {rawMetricConfigFromFile.Name} is RawMetricConfiguration and ReplaceAccountNameWith is not applicable.");
}
var newMetricNamespace = string.IsNullOrWhiteSpace(replaceNamespaceWith)
? rawMetricConfigFromFile.MetricNamespace
: replaceNamespaceWith;
return new RawMetricConfiguration(
newMetricNamespace,
rawMetricConfigFromFile.Name,
rawMetricConfigFromFile.LastUpdatedTime,
rawMetricConfigFromFile.LastUpdatedBy,
rawMetricConfigFromFile.Version,
rawMetricConfigFromFile.ScalingFactor,
rawMetricConfigFromFile.EnableClientPublication,
rawMetricConfigFromFile.EnableClientForking,
rawMetricConfigFromFile.Description,
rawMetricConfigFromFile.Dimensions,
rawMetricConfigFromFile.Preaggregations,
rawMetricConfigFromFile.RawSamplingTypes,
rawMetricConfigFromFile.ComputedSamplingTypes,
rawMetricConfigFromFile.EnableClientSideLastSamplingMode,
rawMetricConfigFromFile.EnableClientEtwPublication);
}
/// <summary>
/// Copy original CompositeMetricConfiguration and do modification.
/// </summary>
/// <param name="compositeMetricConfigFromFile">original CompositeMetricConfiguration from file</param>
/// <param name="monitoringAccount">MonitoringAccount</param>
/// <param name="replaceAccountNameWith">account name to replace original account with in local config files</param>
/// <param name="replaceNamespaceWith">namespace to replace original namespace with in local config files</param>
/// <returns>modified CompositeMetricConfiguration</returns>
private CompositeMetricConfiguration CopyAndReplaceCompositeMetricConfig(
CompositeMetricConfiguration compositeMetricConfigFromFile,
IMonitoringAccount monitoringAccount,
string replaceAccountNameWith,
string replaceNamespaceWith)
{
var newAccountName = string.IsNullOrWhiteSpace(replaceAccountNameWith)
? monitoringAccount.Name
: replaceAccountNameWith;
var newMetricNamespace = string.IsNullOrWhiteSpace(replaceNamespaceWith)
? compositeMetricConfigFromFile.MetricNamespace
: replaceNamespaceWith;
// deep copy metric sources by replacing matching account name and namespace at same time
var newMetricSources = new List<CompositeMetricSource>();
foreach (var source in compositeMetricConfigFromFile.MetricSources)
{
var newSourceAccountName =
source.MonitoringAccount.Equals(monitoringAccount.Name, StringComparison.OrdinalIgnoreCase)
? newAccountName
: source.MonitoringAccount;
var newSourceNamespace =
source.MetricNamespace.Equals(compositeMetricConfigFromFile.MetricNamespace, StringComparison.OrdinalIgnoreCase)
? newMetricNamespace
: source.MetricNamespace;
newMetricSources.Add(new CompositeMetricSource(source.DisplayName, newSourceAccountName, newSourceNamespace, source.Metric));
}
var newCompositeMetricConfig = new CompositeMetricConfiguration(
newMetricNamespace,
compositeMetricConfigFromFile.Name,
compositeMetricConfigFromFile.LastUpdatedTime,
compositeMetricConfigFromFile.LastUpdatedBy,
compositeMetricConfigFromFile.Version,
compositeMetricConfigFromFile.TreatMissingSeriesAsZeroes,
compositeMetricConfigFromFile.Description,
newMetricSources,
compositeMetricConfigFromFile.CompositeExpressions);
return newCompositeMetricConfig;
}
/// <summary>
/// Apply template IMetricConfiguration object by replacing MetricNamespace and Name.
/// </summary>
/// <param name="metricConfigTemplate">template IMetricConfiguration read from file</param>
/// <param name="targetNamespace">target MetricNamespace</param>
/// <param name="targetMetricName">target metric Name</param>
/// <returns>IMetricConfiguration</returns>
private IMetricConfiguration ApplyTemplateConfigWithDifferentMetric(
IMetricConfiguration metricConfigTemplate,
string targetNamespace,
string targetMetricName)
{
IMetricConfiguration newMetricConfig;
if (metricConfigTemplate is RawMetricConfiguration)
{
var rawMetricConfigTemplate = (RawMetricConfiguration)metricConfigTemplate;
newMetricConfig = new RawMetricConfiguration(
targetNamespace,
targetMetricName,
rawMetricConfigTemplate.LastUpdatedTime,
rawMetricConfigTemplate.LastUpdatedBy,
rawMetricConfigTemplate.Version,
rawMetricConfigTemplate.ScalingFactor,
rawMetricConfigTemplate.EnableClientPublication,
rawMetricConfigTemplate.EnableClientForking,
rawMetricConfigTemplate.Description,
rawMetricConfigTemplate.Dimensions,
rawMetricConfigTemplate.Preaggregations,
rawMetricConfigTemplate.RawSamplingTypes,
rawMetricConfigTemplate.ComputedSamplingTypes,
rawMetricConfigTemplate.EnableClientSideLastSamplingMode,
rawMetricConfigTemplate.EnableClientEtwPublication);
}
else
{
var compositeMetricConfigTemplate = (CompositeMetricConfiguration)metricConfigTemplate;
newMetricConfig = new CompositeMetricConfiguration(
targetNamespace,
targetMetricName,
compositeMetricConfigTemplate.LastUpdatedTime,
compositeMetricConfigTemplate.LastUpdatedBy,
compositeMetricConfigTemplate.Version,
compositeMetricConfigTemplate.TreatMissingSeriesAsZeroes,
compositeMetricConfigTemplate.Description,
compositeMetricConfigTemplate.MetricSources,
compositeMetricConfigTemplate.CompositeExpressions);
}
return newMetricConfig;
}
/// <summary>
        /// Determines whether the given configuration is a default (unmodified) configuration.
        /// </summary>
        /// <param name="metricConfiguration">The metric configuration to check.</param>
        /// <returns>True if this is a default configuration; otherwise false.</returns>
private bool IsDefaultMetric(IMetricConfiguration metricConfiguration)
{
const string logTag = "IsDefaultMetricAsync";
if (metricConfiguration == null)
{
Logger.Log(LoggerLevel.Error, LogId, logTag, $"Argument MetricConfiguration [{metricConfiguration.MetricNamespace}][{metricConfiguration.Name}] is NULL.");
throw new ArgumentNullException(nameof(metricConfiguration));
}
return metricConfiguration.LastUpdatedTime == default(DateTime);
}
/// <summary>
/// A helper method which waits for all given sync all tasks to complete.
/// </summary>
/// <param name="taskList">The task list.</param>
/// <param name="results">The results.</param>
/// <returns>An awaitable <see cref="Task"/>.</returns>
private async Task WaitAllForSyncAllAsyncV2(
List<Task<ConfigurationUpdateResultList>> taskList,
List<ConfigurationUpdateResultList> results)
{
const string logTag = "SyncAllAsyncV2";
try
{
await Task.WhenAll(taskList).ConfigureAwait(false);
foreach (var task in taskList)
{
if (task.Result.Success)
{
// For metric configuration in QOS or other internal namespace, there is no configuration
                            // to replicate and thus the update results are an empty list.
if (task.Result.ConfigurationUpdateResults.Count > 0)
{
results.Add(task.Result);
}
}
else
{
                        if (task.Result.ExceptionMessage != null
                            && task.Result.ExceptionMessage.Contains(
                                "Event configuration to be updated can't be null."))
{
// No configuration exist for the specified metrics and hence nothing
// to replicate.
continue;
}
results.Add(task.Result);
}
}
}
catch (Exception ex)
{
Logger.Log(
LoggerLevel.Error,
LogId,
logTag,
$"Exception occured while replicating configuration. Exception: {ex}");
}
}
}
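
    /// <summary>
    /// Minimal usage sketch (not part of the original source). It assumes a <see cref="ConnectionInfo"/>
    /// and an <see cref="IMonitoringAccount"/> obtained elsewhere; the namespace name, metric name and
    /// backup folder below are hypothetical placeholders, and only members defined above are used.
    /// </summary>
    internal static class MetricConfigurationManagerUsageExample
    {
        internal static async Task RunAsync(ConnectionInfo connectionInfo, IMonitoringAccount account)
        {
            var manager = new MetricConfigurationManager(connectionInfo);

            // Read a single metric configuration; a 404 surfaces as MetricNotFoundException.
            IMetricConfiguration config = await manager.GetAsync(
                account, "PlatformMetrics", "RequestCount").ConfigureAwait(false);

            // Save it back; skipVersionCheck: false keeps the server-side version check enabled.
            await manager.SaveAsync(account, config, skipVersionCheck: false).ConfigureAwait(false);

            // Replicate this metric's configuration (and its monitors) to mirror accounts.
            ConfigurationUpdateResultList syncResult = await manager.SyncConfigurationAsync(
                account, config.MetricNamespace, config.Name).ConfigureAwait(false);

            // Download all non-default metric configurations for the account to a local folder.
            OperationStatus status = await manager.DownloadMetricConfigurationAsync(
                @"C:\MetricConfigBackup", account).ConfigureAwait(false);
        }
    }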
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="MetricFilterDetails.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Online.Metrics.Serialization.Configuration
{
/// <summary>
    /// The details of the filter used to filter the hints.
/// </summary>
public sealed class MetricFilterDetails
{
/// <summary>
/// The prefix to be used before a sampling type in the <see cref="Expression"/>.
/// </summary>
public const string Prefix = "raw";
/// <summary>
        /// The expression applied to every minutely data point within the specified time interval.
        /// Example: <see cref="Prefix"/>.Sum &gt; 100 &amp;&amp; <see cref="Prefix"/>.Count &lt; 2
/// </summary>
/// <remarks>
/// The filter expression in <see cref="MetricFilterDetails.Expression"/> is applied on minutely values
/// and the value of the dimension is returned even if the one of the minutely data returns true for the expression provided.
/// </remarks>
public string Expression { get; set; }
/// <summary>
/// Validates the contents of this instance.
/// </summary>
        /// <returns>True if the filter is valid; otherwise false.</returns>
        public bool IsValid()
        {
            return !string.IsNullOrWhiteSpace(this.Expression);
}
}
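
    /// <summary>
    /// Minimal usage sketch (not part of the original source): building a filter that keeps a
    /// dimension value when any minutely data point has Sum greater than 100.
    /// </summary>
    public static class MetricFilterDetailsExample
    {
        public static MetricFilterDetails CreateSumFilter()
        {
            var filter = new MetricFilterDetails
            {
                // "raw" is the required prefix for sampling types in the expression.
                Expression = MetricFilterDetails.Prefix + ".Sum > 100"
            };

            // IsValid() rejects null, empty and whitespace-only expressions.
            System.Diagnostics.Debug.Assert(filter.IsValid());
            return filter;
        }
    }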
}<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="FilteredTimeSeriesQueryRequest.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Query
{
using System;
using System.Collections.Generic;
using Metrics;
using Newtonsoft.Json;
using Online.Metrics.Serialization.Configuration;
/// <summary>
    /// Request to query data with dimensional filters and a Top N condition, and retrieve the
    /// resulting series.
/// </summary>
public sealed class FilteredTimeSeriesQueryRequest
{
/// <summary>
/// Initializes a new instance of the <see cref="FilteredTimeSeriesQueryRequest" /> class.
/// </summary>
/// <param name="metricIdentifier">The metric identifier.</param>
/// <param name="samplingTypes">The sampling types.</param>
/// <param name="dimensionFilters">The dimension filters.</param>
/// <param name="startTimeUtc">The start time UTC.</param>
/// <param name="endTimeUtc">The end time UTC.</param>
/// <param name="seriesResolutionInMinutes">The series resolution in minutes.</param>
/// <param name="aggregationType">Type of the aggregation.</param>
/// <param name="topPropertyDefinition">The top property definition.</param>
/// <param name="numberOfResultsToReturn">The number of results to return.</param>
/// <param name="orderBy">The order by.</param>
/// <param name="zeroAsNoValueSentinel">Indicates whether zero should be used as no value sentinel, or double.NaN.</param>
/// <param name="outputDimensionNames">The dimension names to be used for the result time series. If not set, same as the dimensions in the dimension filter.</param>
/// <param name="lastValueMode">Indicating if the query should be fulfilled with last value mode. If true, null values in the query range requested will be filled with the last known value.</param>
public FilteredTimeSeriesQueryRequest(
MetricIdentifier metricIdentifier,
IReadOnlyList<SamplingType> samplingTypes,
IReadOnlyList<DimensionFilter> dimensionFilters,
DateTime startTimeUtc,
DateTime endTimeUtc,
int seriesResolutionInMinutes,
AggregationType aggregationType,
PropertyDefinition topPropertyDefinition,
int numberOfResultsToReturn,
OrderBy orderBy,
bool zeroAsNoValueSentinel,
IReadOnlyList<string> outputDimensionNames = null,
bool lastValueMode = false)
: this(
metricIdentifier,
null,
null,
null,
samplingTypes,
dimensionFilters,
startTimeUtc,
endTimeUtc,
seriesResolutionInMinutes,
aggregationType,
topPropertyDefinition,
numberOfResultsToReturn,
orderBy,
zeroAsNoValueSentinel,
false,
outputDimensionNames,
lastValueMode)
{
}
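// Illustrative usage sketch (not part of the original source). The samplingTypes, dimensionFilters,
// aggregationType and topPropertyDefinition values are assumed to be built elsewhere with the
// appropriate APIs; only the constructor shape shown here is taken from this file, and the
// account/namespace/metric names are hypothetical.
//
// var request = new FilteredTimeSeriesQueryRequest(
//     metricIdentifier: new MetricIdentifier("MyAccount", "MyNamespace", "MyMetric"),
//     samplingTypes: samplingTypes,
//     dimensionFilters: dimensionFilters,
//     startTimeUtc: DateTime.UtcNow.AddHours(-1),
//     endTimeUtc: DateTime.UtcNow,
//     seriesResolutionInMinutes: 1,
//     aggregationType: aggregationType,
//     topPropertyDefinition: topPropertyDefinition,
//     numberOfResultsToReturn: 10,
//     orderBy: OrderBy.Descending,
//     zeroAsNoValueSentinel: false);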
/// <summary>
/// Initializes a new instance of the <see cref="FilteredTimeSeriesQueryRequest" /> class.
/// </summary>
/// <param name="monitoringAccountNames">The monitoring account names.</param>
/// <param name="metricNamespace">The metric namespace.</param>
/// <param name="metricName">Name of the metric.</param>
/// <param name="samplingTypes">The sampling types.</param>
/// <param name="dimensionFilters">The dimension filters.</param>
/// <param name="startTimeUtc">The start time UTC.</param>
/// <param name="endTimeUtc">The end time UTC.</param>
/// <param name="seriesResolutionInMinutes">The series resolution in minutes.</param>
/// <param name="aggregationType">Type of the aggregation.</param>
/// <param name="topPropertyDefinition">The top property definition.</param>
/// <param name="numberOfResultsToReturn">The number of results to return.</param>
/// <param name="orderBy">The order by.</param>
/// <param name="zeroAsNoValueSentinel">Indicates whether zero should be used as no value sentinel, or double.NaN.</param>
/// <param name="aggregateAcrossAccounts">if set to <c>true</c>, aggregate data across accounts, and the account name in the query results is "AccountMoniker".</param>
/// <param name="outputDimensionNames">The dimension names to be used for the result time series. If not set, same as the dimensions in the dimension filter.</param>
/// <param name="lastValueMode">Indicating if the query should be fulfilled with last value mode.</param>
public FilteredTimeSeriesQueryRequest(
IReadOnlyList<string> monitoringAccountNames,
string metricNamespace,
string metricName,
IReadOnlyList<SamplingType> samplingTypes,
IReadOnlyList<DimensionFilter> dimensionFilters,
DateTime startTimeUtc,
DateTime endTimeUtc,
int seriesResolutionInMinutes,
AggregationType aggregationType,
PropertyDefinition topPropertyDefinition,
int numberOfResultsToReturn,
OrderBy orderBy,
bool zeroAsNoValueSentinel,
bool aggregateAcrossAccounts,
IReadOnlyList<string> outputDimensionNames = null,
bool lastValueMode = false)
: this(
default(MetricIdentifier),
monitoringAccountNames,
metricNamespace,
metricName,
samplingTypes,
dimensionFilters,
startTimeUtc,
endTimeUtc,
seriesResolutionInMinutes,
aggregationType,
topPropertyDefinition,
numberOfResultsToReturn,
orderBy,
zeroAsNoValueSentinel,
aggregateAcrossAccounts,
outputDimensionNames,
lastValueMode)
{
if (monitoringAccountNames == null || monitoringAccountNames.Count == 0)
{
throw new ArgumentException("must not be null or empty", nameof(monitoringAccountNames));
}
for (int i = 0; i < monitoringAccountNames.Count; i++)
{
if (string.IsNullOrWhiteSpace(monitoringAccountNames[i]))
{
throw new ArgumentException($"All monitoring accounts must not be null or empty: {string.Join(",", monitoringAccountNames)}.", nameof(monitoringAccountNames));
}
}
// Ensure deserialization does not throw: MetricIdentifier is a struct and, in addition, it does not allow null-or-empty property members.
this.MetricIdentifier = new MetricIdentifier(monitoringAccountNames[0], metricNamespace, metricName);
}
/// <summary>
/// Initializes a new instance of the <see cref="FilteredTimeSeriesQueryRequest"/> class.
/// </summary>
/// <param name="metricIdentifier">The metric identifier.</param>
/// <remarks>For OBO, we fill in only the <see cref="MetricIdentifier"/> property on success but we do fill in the full request on failure.</remarks>
internal FilteredTimeSeriesQueryRequest(MetricIdentifier metricIdentifier)
{
this.MetricIdentifier = metricIdentifier;
}
/// <summary>
/// Initializes a new instance of the <see cref="FilteredTimeSeriesQueryRequest" /> class.
/// </summary>
/// <param name="metricIdentifier">The metric identifier.</param>
/// <param name="monitoringAccountNames">The monitoring account names.</param>
/// <param name="metricNamespace">The metric namespace.</param>
/// <param name="metricName">Name of the metric.</param>
/// <param name="samplingTypes">The sampling types.</param>
/// <param name="dimensionFilters">The dimension filters.</param>
/// <param name="startTimeUtc">The start time UTC.</param>
/// <param name="endTimeUtc">The end time UTC.</param>
/// <param name="seriesResolutionInMinutes">The series resolution in minutes.</param>
/// <param name="aggregationType">Type of the aggregation.</param>
/// <param name="topPropertyDefinition">The top property definition.</param>
/// <param name="numberOfResultsToReturn">The number of results to return.</param>
/// <param name="orderBy">The order by.</param>
/// <param name="zeroAsNoValueSentinel">Indicates whether zero should be used as no value sentinel, or double.NaN.</param>
/// <param name="aggregateAcrossAccounts">if set to <c>true</c>, aggregate data across accounts, and the account name in the query results is "AccountMoniker".</param>
/// <param name="outputDimensionNames">The dimension names to be used for the result time series. If not set, same as the dimensions in the dimension filter.</param>
/// <param name="lastValueMode">Indicating if the query should be fulfilled with last value mode. If true, null values in the query range requested will be filled with the last known value.</param>
[JsonConstructor]
private FilteredTimeSeriesQueryRequest(
MetricIdentifier metricIdentifier,
IReadOnlyList<string> monitoringAccountNames,
string metricNamespace,
string metricName,
IReadOnlyList<SamplingType> samplingTypes,
IReadOnlyList<DimensionFilter> dimensionFilters,
DateTime startTimeUtc,
DateTime endTimeUtc,
int seriesResolutionInMinutes,
AggregationType aggregationType,
PropertyDefinition topPropertyDefinition,
int numberOfResultsToReturn,
OrderBy orderBy,
bool zeroAsNoValueSentinel,
bool aggregateAcrossAccounts,
IReadOnlyList<string> outputDimensionNames = null,
bool lastValueMode = false)
{
this.MetricIdentifier = metricIdentifier;
this.MonitoringAccountNames = monitoringAccountNames;
this.MetricNamespace = metricNamespace;
this.MetricName = metricName;
this.SamplingTypes = samplingTypes;
this.DimensionFilters = dimensionFilters;
this.StartTimeUtc = startTimeUtc;
this.EndTimeUtc = endTimeUtc;
this.SeriesResolutionInMinutes = seriesResolutionInMinutes;
this.AggregationType = aggregationType;
this.TopPropertyDefinition = topPropertyDefinition;
this.NumberOfResultsToReturn = numberOfResultsToReturn;
this.OrderBy = orderBy;
this.ZeroAsNoValueSentinel = zeroAsNoValueSentinel;
this.AggregateAcrossAccounts = aggregateAcrossAccounts;
this.OutputDimensionNames = outputDimensionNames;
this.LastValueMode = lastValueMode;
}
/// <summary>
/// Gets the monitoring accounts to be queried.
/// </summary>
public IReadOnlyList<string> MonitoringAccountNames { get; }
/// <summary>
/// Gets the metric namespace.
/// </summary>
public string MetricNamespace { get; }
/// <summary>
/// Gets the name of the metric.
/// </summary>
public string MetricName { get; }
/// <summary>
/// Gets the metric identifier to be queried.
/// </summary>
public MetricIdentifier MetricIdentifier { get; }
/// <summary>
/// Gets the sampling types to be queried.
/// </summary>
public IReadOnlyList<SamplingType> SamplingTypes { get; }
/// <summary>
/// Gets the dimension filters used to determine which time series keys should be
/// retrieved from the data store.
/// </summary>
public IReadOnlyList<DimensionFilter> DimensionFilters { get; }
/// <summary>
/// Gets the start time of the query period in UTC.
/// </summary>
public DateTime StartTimeUtc { get; }
/// <summary>
/// Gets the end time of the query period in UTC.
/// </summary>
public DateTime EndTimeUtc { get; }
/// <summary>
/// Gets the series resolution in minutes. One data point will represent x number
/// of minutes of raw data.
/// </summary>
public int SeriesResolutionInMinutes { get; }
/// <summary>
/// Gets the type of the aggregation to be performed when resolution is being reduced.
/// </summary>
public AggregationType AggregationType { get; }
/// <summary>
/// Gets the top property definition. This defines which sampling type is used to select the top N series.
/// </summary>
public PropertyDefinition TopPropertyDefinition { get; }
/// <summary>
/// Gets the number of results to return.
/// </summary>
public int NumberOfResultsToReturn { get; }
/// <summary>
/// Gets the ordering of the results, either Ascending or Descending.
/// </summary>
public OrderBy OrderBy { get; }
/// <summary>
/// Gets a value indicating whether zero, rather than double.NaN, is used as the no-value sentinel in time series data.
/// </summary>
public bool ZeroAsNoValueSentinel { get; }
/// <summary>
/// Gets a value indicating whether to aggregate data across accounts.
/// </summary>
public bool AggregateAcrossAccounts { get; }
/// <summary>
/// Gets the output dimension names to be returned with the result time series. If the output dimension names are not specified,
/// the dimensions used as dimension filters will be the output dimensions.
/// </summary>
public IReadOnlyList<string> OutputDimensionNames { get; }
/// <summary>
/// Gets a value indicating whether the query should be fulfilled in last-value mode. If true, null values in the requested query range will be filled with the last known value.
/// </summary>
public bool LastValueMode { get; }
}
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="TraceEvent.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// <summary>
// Type that wraps access to the fields of an ETW event record.
// </summary>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Metrics.Etw
{
using System;
using System.Diagnostics.CodeAnalysis;
/// <summary>
/// Type that wraps access to the fields of an ETW event record.
/// </summary>
internal unsafe struct TraceEvent
{
/// <summary>
/// Pointer to the native structure being wrapped.
/// </summary>
[SuppressMessage("Microsoft.Security", "CA2151:FieldsWithCriticalTypesShouldBeCriticalFxCopRule", Justification = "Not accessible to any 3rd-party MS or not")]
private readonly NativeMethods.EventRecord* eventRecord;
/// <summary>
/// Managed type that allows access to the event header.
/// </summary>
private readonly TraceEventHeader eventHeader;
/// <summary>
/// Managed type that allows access to the buffer context of the event.
/// </summary>
private readonly TraceBufferContext bufferContext;
/// <summary>
/// Initializes a new instance of the <see cref="TraceEvent"/> struct.
/// </summary>
/// <param name="eventRecord">
/// Pointer to the native structure being wrapped by the instance.
/// </param>
public TraceEvent(NativeMethods.EventRecord* eventRecord)
: this()
{
if (eventRecord == null)
{
throw new ArgumentNullException("eventRecord");
}
this.eventRecord = eventRecord;
this.eventHeader = new TraceEventHeader(&eventRecord->EventHeader);
this.bufferContext = new TraceBufferContext(&eventRecord->BufferContext);
}
/// <summary>
/// Gets the header of the event.
/// </summary>
public TraceEventHeader Header
{
get
{
return this.eventHeader;
}
}
/// <summary>
/// Gets the buffer context of the event.
/// </summary>
public TraceBufferContext BufferContext
{
get
{
return this.bufferContext;
}
}
/// <summary>
/// Gets the number of extended data structures in ExtendedData.
/// </summary>
public ushort ExtendedDataCount
{
get
{
return this.eventRecord->ExtendedDataCount;
}
}
/// <summary>
/// Gets the size, in bytes, of the data in UserData.
/// </summary>
public ushort UserDataLength
{
get
{
return this.eventRecord->UserDataLength;
}
}
/// <summary>
/// Gets the extended data items that ETW collects if the controller sets the EnableProperty
/// parameter of EnableTraceEx. For details, see EVENT_HEADER_EXTENDED_DATA_ITEM.
/// </summary>
public IntPtr ExtendedData
{
get
{
return this.eventRecord->ExtendedData;
}
}
/// <summary>
/// Gets the event specific data. To parse this data, see Retrieving Event Data Using TDH.
/// If the Flags member of EVENT_HEADER is EVENT_HEADER_FLAG_STRING_ONLY, the data is a
/// null-terminated Unicode string that you do not need TDH to parse.
/// </summary>
public IntPtr UserData
{
get
{
return this.eventRecord->UserData;
}
}
/// <summary>
/// Gets the context specified in the Context member of the EVENT_TRACE_LOGFILE structure
/// that is passed to OpenTrace.
/// </summary>
public IntPtr UserContext
{
get
{
return this.eventRecord->UserContext;
}
}
}
}
<file_sep>// -----------------------------------------------------------------------
// <copyright file="IMetricMetadata.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// -----------------------------------------------------------------------
namespace Microsoft.Online.Metrics.Serialization
{
using System;
/// <summary>
/// Represents an interface for the metric metadata object.
/// </summary>
public interface IMetricMetadata
{
/// <summary>
/// Gets the namespace of the metric.
/// </summary>
string MetricNamespace { get; }
/// <summary>
/// Gets the name of the metric.
/// </summary>
string MetricName { get; }
/// <summary>
/// Gets the number of dimensions the metric has.
/// </summary>
int DimensionsCount { get; }
/// <summary>
/// Gets the name of the dimension by index.
/// </summary>
/// <param name="dimensionIndex">Index of the dimension in 0..DimensionsCount-1 range.</param>
/// <returns>Name of the dimension.</returns>
/// <exception cref="IndexOutOfRangeException">Thrown when index is out of the specified range.</exception>
string GetDimensionName(int dimensionIndex);
}
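// Minimal illustrative implementation sketch (not part of the original source), showing how the
// dimension-name contract above is typically satisfied; the type name is hypothetical.
//
// internal sealed class SimpleMetricMetadata : IMetricMetadata
// {
//     private readonly string[] dimensionNames;
//
//     public SimpleMetricMetadata(string metricNamespace, string metricName, params string[] dimensionNames)
//     {
//         this.MetricNamespace = metricNamespace;
//         this.MetricName = metricName;
//         this.dimensionNames = dimensionNames ?? new string[0];
//     }
//
//     public string MetricNamespace { get; }
//     public string MetricName { get; }
//     public int DimensionsCount => this.dimensionNames.Length;
//
//     // Throws IndexOutOfRangeException when the index is outside 0..DimensionsCount-1, as documented.
//     public string GetDimensionName(int dimensionIndex) => this.dimensionNames[dimensionIndex];
// }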
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="OboMetricReader.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Metrics
{
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net.Http;
using System.Threading.Tasks;
using Microsoft.Cloud.Metrics.Client.Query;
using Microsoft.Cloud.Metrics.Client.Utility;
/// <summary>
/// The metrics reader class for reading metrics data for OBO V2, with support for multiple dimensions.
/// </summary>
public sealed class OboMetricReader
{
private readonly ConnectionInfo connectionInfo;
private readonly HttpClient httpClient;
private readonly string clientId;
/// <summary>
/// Initializes a new instance of the <see cref="OboMetricReader"/> class.
/// </summary>
/// <param name="connectionInfo">The connection information.</param>
/// <param name="clientId">The string identifying client.</param>
public OboMetricReader(ConnectionInfo connectionInfo, string clientId = "OBO")
{
if (connectionInfo == null)
{
throw new ArgumentNullException(nameof(connectionInfo));
}
this.connectionInfo = connectionInfo;
this.clientId = clientId;
this.httpClient = HttpClientHelper.CreateHttpClientWithAuthInfo(connectionInfo);
}
/// <summary>
/// Gets the filtered time series.
/// </summary>
/// <param name="startTimeUtc">The start time UTC.</param>
/// <param name="numMinutes">The number minutes.</param>
/// <param name="resourceId">The resource identifier.</param>
/// <param name="samplingTypes">The sampling types.</param>
/// <param name="categories">The categories.</param>
/// <returns>List of <see cref="IFilteredTimeSeriesQueryResponse"/>.</returns>
public async Task<IReadOnlyList<IFilteredTimeSeriesQueryResponse>> GetFilteredTimeSeriesAsync(DateTime startTimeUtc, int numMinutes, string resourceId, SamplingType[] samplingTypes, List<string> categories)
{
var startMinute = startTimeUtc.ToString("yyyy-MM-ddTHH:mmZ");
var endpoint = new Uri(this.connectionInfo.Endpoint, $"/api/getMetricsForOBO/v2/serializationVersion/{FilteredTimeSeriesQueryResponse.CurrentVersion}/startMinute/{startMinute}/numMinutes/{numMinutes}");
var traceId = Guid.NewGuid();
var httpContent = Tuple.Create(new List<string> { resourceId }, samplingTypes, categories);
var response = await HttpClientHelper.GetResponse(
endpoint,
HttpMethod.Post,
this.httpClient,
null, // TODO add support of monitoring account on server side and then pass it here
null, // TODO add support of monitoring account on server side and pass operation here
httpContent,
traceId: traceId,
clientId: this.clientId,
numAttempts: 1).ConfigureAwait(false);
string handlingRpServerId;
IReadOnlyList<IFilteredTimeSeriesQueryResponse> results;
using (HttpResponseMessage httpResponseMessage = response.Item2)
{
IEnumerable<string> handlingRpServerIdValues;
httpResponseMessage.Headers.TryGetValues("__HandlingRpServerId__", out handlingRpServerIdValues);
handlingRpServerId = handlingRpServerIdValues?.FirstOrDefault();
using (var stream = await httpResponseMessage.Content.ReadAsStreamAsync().ConfigureAwait(false))
{
results = FilteredQueryResponseDeserializer.Deserialize(stream);
}
}
foreach (var queryResponse in results)
{
queryResponse.DiagnosticInfo.TraceId = traceId.ToString("B");
queryResponse.DiagnosticInfo.HandlingServerId = handlingRpServerId;
}
return results;
}
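// Illustrative usage sketch (not part of the original source); the resource identifier and category
// below are hypothetical, and samplingTypes is assumed to be built elsewhere.
//
// var reader = new OboMetricReader(connectionInfo);
// IReadOnlyList<IFilteredTimeSeriesQueryResponse> responses = await reader.GetFilteredTimeSeriesAsync(
//     startTimeUtc: DateTime.UtcNow.AddMinutes(-10),
//     numMinutes: 10,
//     resourceId: "/subscriptions/.../myResource",
//     samplingTypes: samplingTypes,
//     categories: new List<string> { "SomeCategory" });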
}
}
<file_sep>// -----------------------------------------------------------------------
// <copyright file="ILogEngine.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// -----------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Logging
{
using System;
/// <summary>
/// Level of the log messages.
/// </summary>
/// <remarks>
/// Level comments taken from System.Diagnostics.Tracing.EventLevel
/// </remarks>
public enum LoggerLevel
{
/// <summary>
/// This level adds standard errors that signify a problem.
/// </summary>
Error = 0,
/// <summary>
/// This level adds warning events (for example, events that are published
/// because a disk is nearing full capacity).
/// </summary>
Warning = 1,
/// <summary>
/// This level adds only customer-facing informational messages.
/// </summary>
CustomerFacingInfo = 2,
/// <summary>
/// This level adds informational events or messages that are not errors.
/// These events can help trace the progress or state of an application.
/// </summary>
Info = 3,
/// <summary>
/// This level adds lengthy events or messages.
/// </summary>
Debug = 4
}
/// <summary>
/// Interface that needs to be supported by any log engine to be used by the process.
/// </summary>
public interface ILogEngine : IDisposable
{
/// <summary>
/// Logs the given data according to the engine implementation.
/// </summary>
/// <param name="level">
/// Level of the log statement.
/// </param>
/// <param name="logId">
/// Log identification for classifying log statements.
/// </param>
/// <param name="tag">
/// Extra string that allows another level of classification under the log id.
/// </param>
/// <param name="format">
/// Message to be logged, it can be a format message.
/// </param>
/// <param name="objectParams">
/// Optional, any parameter to be used to build the formatted message string.
/// </param>
void Log(
LoggerLevel level,
object logId,
string tag,
string format,
params object[] objectParams);
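// Illustrative call sketch (not part of the original source); the log id, tag and message are hypothetical:
// logEngine.Log(LoggerLevel.Info, 42, "Startup", "Initialized in {0} ms", 125);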
/// <summary>
/// Checks if a log statement with the given parameters will be actually logged or
/// not. Useful to avoid expensive operations for log statements that are going to
/// be dropped by the log engine.
/// </summary>
/// <param name="level">
/// Level of the log statement.
/// </param>
/// <param name="logId">
/// Log identification for classifying log statements.
/// </param>
/// <param name="tag">
/// Extra string that allows another level of classification under the log id.
/// </param>
/// <returns>
/// True if the statement is going to be logged, false otherwise.
/// </returns>
bool IsLogged(LoggerLevel level, object logId, string tag);
}
}
<file_sep>// -----------------------------------------------------------------------
// <copyright file="MetricSerializer.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// -----------------------------------------------------------------------
namespace Microsoft.Online.Metrics.Serialization
{
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Text;
/// <summary>
/// Serializer used for serializing metric data between
/// - Metrics extension and FrontEnd
/// - Metric puller service and FrontEnd
/// - FrontEnd and metrics aggregator for cache server metrics
/// </summary>
public sealed class MetricSerializer
{
/// <summary>
/// The maximum version that the serializer can produce.
/// To be used internally. External code should
/// always track its supported serializer/deserializer versions separately
/// and be independent of the maximum version supported by MetricSerializer.
/// </summary>
private const ushort MaxVersion = 6;
private const uint TypeSerializerFlags = 0x12020000; // Corresponds to 0001.001.0000.0001.00000000000000000 (Use string and metadata interning with variable-length integer serialization)
private const uint TempBufferSize = 1500;
private readonly Dictionary<string, uint> stringIndexes = new Dictionary<string, uint>(StringComparer.OrdinalIgnoreCase);
private readonly List<string> strings = new List<string>();
private readonly Dictionary<IMetricMetadata, uint> metadataIndexes = new Dictionary<IMetricMetadata, uint>(MetricEqualityComparer.Instance);
private readonly List<IMetricMetadata> metadatas = new List<IMetricMetadata>();
private readonly ushort hllSerializationVersion;
private readonly bool estimatePacketSize;
private readonly float stringCharEstimatedSizeInBytes;
private ushort version;
private uint nextStringIndex;
private uint nextMetadataIndex;
private long currentMetricDataBlockSize;
private long currentMetadataDictionaryBlockSize;
private long currentStringDictionaryBlockSize;
private byte[] tempBuffer;
/// <summary>
/// Initializes a new instance of the <see cref="MetricSerializer"/> class.
/// </summary>
/// <param name="version">Serialization version to use (from 0 to MaxVersion).</param>
/// <param name="hllSerializationVersion">Hll serialization version till whi</param>
/// <param name="estimatePacketSize">Flag to know whether to do packet size estimate calculations as data is serialized.</param>
/// <param name="stringCharEstimatedSizeInBytes">Estimated size in bytes for each character in strings used inside the packet.</param>
public MetricSerializer(ushort version = 1, ushort hllSerializationVersion = 1, bool estimatePacketSize = false, float stringCharEstimatedSizeInBytes = 1.5f)
{
if (version > MaxVersion)
{
throw new ArgumentException("Version number is greated than maximum current version supported: " + MaxVersion, nameof(version));
}
if (stringCharEstimatedSizeInBytes > 2 || stringCharEstimatedSizeInBytes < 1)
{
throw new ArgumentException($"{nameof(stringCharEstimatedSizeInBytes)} cannot be greater than 2 and less than 1");
}
this.version = version;
this.hllSerializationVersion = hllSerializationVersion;
this.estimatePacketSize = estimatePacketSize;
this.stringCharEstimatedSizeInBytes = stringCharEstimatedSizeInBytes;
}
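// Illustrative usage sketch (not part of the original source); assumes an existing
// IEnumerable<IReadOnlyMetric> named metrics produced elsewhere, and a serialization
// version chosen to match the consumer.
//
// var serializer = new MetricSerializer(version: 5);
// using (var stream = new MemoryStream())
// {
//     serializer.Serialize(stream, metrics); // MemoryStream is writable and seekable, as required.
//     byte[] packet = stream.ToArray();
// }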
/// <summary>
/// Gets or sets the serialization version with which the serializer is currently configured.
/// </summary>
public ushort SerializationVersion
{
get
{
return this.version;
}
set
{
if (value > MaxVersion)
{
throw new ArgumentException("Version number is greated than maximum current version supported: " + MaxVersion, nameof(value));
}
this.version = value;
}
}
/// <summary>
/// Gets the expected size of the package in current serialization state.
/// </summary>
public long ExpectedPackageSize
{
get
{
if (this.estimatePacketSize)
{
return this.currentMetricDataBlockSize + this.currentMetadataDictionaryBlockSize + this.currentStringDictionaryBlockSize +
SerializationUtils.EstimateUInt32InBase128Size((uint)this.strings.Count) +
SerializationUtils.EstimateUInt32InBase128Size((uint)this.metadatas.Count);
}
throw new InvalidOperationException("Packet size estimation is not enabled, use estimatePacketSize:true when creating the serializer");
}
}
/// <summary>
/// Serializes counter (metric) data to the stream.
/// </summary>
/// <param name="stream">Stream to which data should be serialized. Stream should be writable and provide random access.</param>
/// <param name="metricData">Collection of metric data to be serialized.</param>
public void Serialize(Stream stream, IEnumerable<IReadOnlyMetric> metricData)
{
if (!stream.CanWrite || !stream.CanSeek)
{
throw new ArgumentException("Stream should be writable and provide random access.", nameof(stream));
}
try
{
using (var writer = new NoCloseBinaryWriter(stream, Encoding.UTF8))
{
var startStreamPosition = stream.Position;
// Write version and type serializers info
writer.Write(this.version);
long crcOffSet = 0;
long crcBodyOffSet = 0;
if (this.version >= 5)
{
// Add CRC
crcOffSet = stream.Position;
writer.Write((uint)0);
crcBodyOffSet = stream.Position;
}
writer.Write(TypeSerializerFlags);
// Reserve place to write type serializers data sections offsets
var offsetsPosition = stream.Position;
stream.Position += 2 * sizeof(long);
// Write metrics data
this.WriteMetricsData(writer, metricData);
// Write cached metrics metadata and offset
var serializerDataPosition = stream.Position;
stream.Position = offsetsPosition;
writer.Write(serializerDataPosition - startStreamPosition);
offsetsPosition = stream.Position;
stream.Position = serializerDataPosition;
SerializationUtils.WriteUInt32AsBase128(writer, (uint)this.metadatas.Count);
this.metadatas.ForEach(m => this.WriteMetricMetadata(writer, m));
// Write cached strings and offset
serializerDataPosition = stream.Position;
stream.Position = offsetsPosition;
writer.Write(serializerDataPosition - startStreamPosition);
stream.Position = serializerDataPosition;
SerializationUtils.WriteUInt32AsBase128(writer, (uint)this.strings.Count);
this.strings.ForEach(writer.Write);
var endOfStream = stream.Position;
if (this.version >= 5)
{
stream.Position = crcBodyOffSet;
var crc = Crc.ComputeCrc(0, stream, stream.Length - crcBodyOffSet);
stream.Position = crcOffSet;
writer.Write(crc);
}
stream.Position = endOfStream;
}
}
catch (IOException ioException)
{
throw new MetricSerializationException("Failed to serialize data.", ioException);
}
finally
{
this.nextStringIndex = 0;
this.stringIndexes.Clear();
this.strings.Clear();
this.nextMetadataIndex = 0;
this.metadataIndexes.Clear();
this.metadatas.Clear();
this.currentMetricDataBlockSize = 0;
this.currentMetadataDictionaryBlockSize = 0;
this.currentStringDictionaryBlockSize = 0;
}
}
private long EstimateStringSize(string value)
{
// Assumes roughly 50% of the characters are non-ASCII.
return (int)(value.Length * this.stringCharEstimatedSizeInBytes) + SerializationUtils.EstimateUInt32InBase128Size((uint)value.Length);
}
private void WriteMetricsData(BinaryWriter writer, IEnumerable<IReadOnlyMetric> metricData)
{
Stream writerStream = writer.BaseStream;
long currentTimeInMinutes = 0;
if (this.version >= 5)
{
var currentTimeInTicks = DateTime.UtcNow.Ticks;
currentTimeInMinutes = (currentTimeInTicks - (currentTimeInTicks % SerializationUtils.OneMinuteInterval)) / SerializationUtils.OneMinuteInterval;
SerializationUtils.WriteUInt64AsBase128(writer, (ulong)currentTimeInMinutes);
}
var metricsCountPosition = writer.BaseStream.Position;
writer.Write((uint)0);
uint metricsCount = 0;
foreach (var data in metricData)
{
++metricsCount;
var metadata = data.MetricMetadata;
this.WriteMetricMetadataIndex(writer, metadata);
// In versions 0-2 Monitoring Account and Metric Namespace was part of the Metric data
// From version 3 Monitoring Account is removed and Metric Namespace became a part of Metric Metadata
if (this.version < 3)
{
SerializationUtils.WriteUInt32AsBase128(writer, this.RegisterString(data.MonitoringAccount));
SerializationUtils.WriteUInt32AsBase128(writer, this.RegisterString(data.MetricNamespace));
}
// In version 0 we had EventId, which was always passed as empty string
if (this.version == 0)
{
SerializationUtils.WriteUInt32AsBase128(writer, this.RegisterString(string.Empty));
}
if (this.version >= 5)
{
var timeTicks = data.TimeUtc.Ticks;
long timeInMinutes = (timeTicks - (timeTicks % SerializationUtils.OneMinuteInterval)) / SerializationUtils.OneMinuteInterval;
SerializationUtils.WriteInt64AsBase128(writer, currentTimeInMinutes - timeInMinutes);
}
else
{
SerializationUtils.WriteUInt64AsBase128(writer, (ulong)data.TimeUtc.Ticks);
}
for (byte j = 0; j < data.MetricMetadata.DimensionsCount; ++j)
{
SerializationUtils.WriteUInt32AsBase128(writer, this.RegisterString(data.GetDimensionValue(j)));
}
var samplingTypes = data.SamplingTypes;
bool useDouble = false;
bool storeDoubleAsLong = false;
if ((samplingTypes & SamplingTypes.DoubleValueType) != 0)
{
useDouble = true;
if (data.SumUnion.CanRepresentDoubleAsLong() &&
((samplingTypes & SamplingTypes.Min) == 0 ||
(data.MinUnion.CanRepresentDoubleAsLong() && data.MaxUnion.CanRepresentDoubleAsLong())))
{
samplingTypes = samplingTypes | SamplingTypes.DoubleValueStoredAsLongType;
storeDoubleAsLong = true;
}
}
SerializationUtils.WriteUInt32AsBase128(writer, (uint)samplingTypes);
if ((data.SamplingTypes & SamplingTypes.Min) != 0)
{
this.WriteMetricValue(data.MinUnion, useDouble, storeDoubleAsLong, writer);
}
if ((data.SamplingTypes & SamplingTypes.Max) != 0)
{
this.WriteMetricValue(data.MaxUnion, useDouble, storeDoubleAsLong, writer);
}
if ((data.SamplingTypes & SamplingTypes.Sum) != 0)
{
this.WriteMetricValue(data.SumUnion, useDouble, storeDoubleAsLong, writer);
}
if ((data.SamplingTypes & SamplingTypes.Count) != 0)
{
SerializationUtils.WriteUInt32AsBase128(writer, data.Count);
}
if ((data.SamplingTypes & SamplingTypes.SumOfSquareDiffFromMean) != 0)
{
writer.Write(data.SumOfSquareDiffFromMean);
}
if ((data.SamplingTypes & SamplingTypes.Histogram) != 0)
{
if (data.Histogram == null)
{
var message = string.Format(
CultureInfo.InvariantCulture,
"Invalid input data. Declared sampling type contains Histogram, but Histogram data is null. Metric:({0},{1},{2}).",
data.MonitoringAccount,
data.MetricNamespace,
data.MetricMetadata.MetricName);
throw new MetricSerializationException(message, null);
}
SerializationUtils.WriteHistogramDataHistogram(writer, data.Histogram.SamplesCount, data.Histogram.Samples, this.version > 3);
}
if ((data.SamplingTypes & SamplingTypes.HyperLogLogSketch) != 0)
{
if (data.HyperLogLogSketchesStream == null && data.HyperLogLogSketches == null)
{
var message = string.Format(
CultureInfo.InvariantCulture,
"Invalid input data. Declared sampling type contains sketches, but sketches data is null. Metric:({0},{1},{2}).",
data.MonitoringAccount,
data.MetricNamespace,
data.MetricMetadata.MetricName);
throw new MetricSerializationException(message, null);
}
if (data.HyperLogLogSketchesStream != null)
{
if (this.tempBuffer == null)
{
this.tempBuffer = new byte[TempBufferSize];
}
writer.Write((int)data.HyperLogLogSketchesStream.Length);
var sketchStreamStartPosition = data.HyperLogLogSketchesStream.Position;
SerializationUtils.ReadFromStream(data.HyperLogLogSketchesStream, writer.BaseStream, (int)data.HyperLogLogSketchesStream.Length, this.tempBuffer);
data.HyperLogLogSketchesStream.Position = sketchStreamStartPosition;
}
else
{
if (this.hllSerializationVersion == 0)
{
SerializationUtils.WriteHyperLogLogSketches(writer, data.HyperLogLogSketches.HyperLogLogSketchesCount, data.HyperLogLogSketches.HyperLogLogSketches);
}
else
{
SerializationUtils.WriteHyperLogLogSketchesV2(writer, data.HyperLogLogSketches.HyperLogLogSketchesCount, data.HyperLogLogSketches.HyperLogLogSketches);
}
}
}
if (this.version >= 6)
{
if ((data.SamplingTypes & samplingTypes & SamplingTypes.TDigest) != 0)
{
SerializationUtils.WriteUInt32AsBase128(writer, FrontEndMetricDeserializer<IMetricMetadata>.TDigestPrefixValue);
long pos = writerStream.Position;
// placeholder for length encoded as 4 bytes
writer.Write((ushort)0);
writer.Write((ushort)0);
data.TDigest.Serialize(writer);
long tdigestSerializedLength = writerStream.Position - pos - 4;
if (tdigestSerializedLength > ushort.MaxValue)
{
throw new ArgumentException("TDigest too big");
}
writerStream.Position = pos;
SerializationUtils.WriteUInt32InBase128AsFixed4Bytes(writer, (ushort)tdigestSerializedLength);
writerStream.Position += tdigestSerializedLength;
}
SerializationUtils.WriteUInt32AsBase128(writer, 0);
}
this.currentMetricDataBlockSize = writer.BaseStream.Position;
}
var currentPosition = writer.BaseStream.Position;
writer.BaseStream.Position = metricsCountPosition;
// Versions before 2 used a variable number of bytes to write the number of serialized metrics.
// From version 2 on, passing IEnumerable<IReadOnlyMetric> is supported, so the number of metrics
// is unknown beforehand and a variable-length count can no longer be used; a plain fixed 4-byte
// uint is written instead. For older versions, to keep compatibility while still serializing a
// variable amount of data, the uint is written in the variable-length encoding but padded to a fixed 4 bytes.
if (this.version >= 2)
{
writer.Write(metricsCount);
}
else
{
SerializationUtils.WriteUInt32InBase128AsFixed4Bytes(writer, metricsCount);
}
writer.BaseStream.Position = currentPosition;
}
private void WriteMetricMetadataIndex(BinaryWriter writer, IMetricMetadata value)
{
uint index;
if (!this.metadataIndexes.TryGetValue(value, out index))
{
index = this.nextMetadataIndex++;
this.metadataIndexes.Add(value, index);
this.metadatas.Add(value);
if (this.estimatePacketSize)
{
// In versions 0-2 Metric Namespace was part of the Metric data, from version 3 it became a part of Metric Metadata
if (this.version >= 3)
{
this.currentMetadataDictionaryBlockSize += SerializationUtils.EstimateUInt32InBase128Size(this.RegisterString(value.MetricNamespace));
}
this.currentMetadataDictionaryBlockSize += SerializationUtils.EstimateUInt32InBase128Size(this.RegisterString(value.MetricName));
this.currentMetadataDictionaryBlockSize += SerializationUtils.EstimateUInt32InBase128Size((uint)value.DimensionsCount);
for (int i = 0; i < value.DimensionsCount; ++i)
{
this.currentMetadataDictionaryBlockSize += SerializationUtils.EstimateUInt32InBase128Size(this.RegisterString(value.GetDimensionName(i)));
}
}
}
SerializationUtils.WriteUInt32AsBase128(writer, index);
}
private void WriteMetricMetadata(BinaryWriter writer, IMetricMetadata value)
{
// In versions 0-2 Metric Namespace was part of the Metric data, from version 3 it became a part of Metric Metadata
if (this.version >= 3)
{
SerializationUtils.WriteUInt32AsBase128(writer, this.RegisterString(value.MetricNamespace));
}
SerializationUtils.WriteUInt32AsBase128(writer, this.RegisterString(value.MetricName));
SerializationUtils.WriteUInt32AsBase128(writer, (uint)value.DimensionsCount);
for (var i = 0; i < value.DimensionsCount; ++i)
{
SerializationUtils.WriteUInt32AsBase128(writer, this.RegisterString(value.GetDimensionName(i)));
}
}
private void WriteMetricValue(MetricValueV2 value, bool useDouble, bool storeDoubleAsLong, BinaryWriter writer)
{
if (useDouble)
{
if (storeDoubleAsLong)
{
SerializationUtils.WriteInt64AsBase128(writer, (long)value.ValueAsDouble);
}
else
{
writer.Write(value.ValueAsDouble);
}
}
else
{
SerializationUtils.WriteUInt64AsBase128(writer, value.ValueAsULong);
}
}
private uint RegisterString(string value)
{
uint index;
value = value ?? string.Empty;
if (!this.stringIndexes.TryGetValue(value, out index))
{
index = this.nextStringIndex++;
this.stringIndexes.Add(value, index);
this.strings.Add(value);
if (this.estimatePacketSize)
{
this.currentStringDictionaryBlockSize += this.EstimateStringSize(value);
}
}
return index;
}
private sealed class MetricEqualityComparer : IEqualityComparer<IMetricMetadata>
{
public static readonly MetricEqualityComparer Instance = new MetricEqualityComparer();
public bool Equals(IMetricMetadata x, IMetricMetadata y)
{
if (ReferenceEquals(x, y))
{
return true;
}
if (x.DimensionsCount != y.DimensionsCount ||
!StringComparer.OrdinalIgnoreCase.Equals(x.MetricNamespace, y.MetricNamespace) ||
!StringComparer.OrdinalIgnoreCase.Equals(x.MetricName, y.MetricName))
{
return false;
}
for (int i = 0; i < x.DimensionsCount; ++i)
{
if (!StringComparer.OrdinalIgnoreCase.Equals(x.GetDimensionName(i), y.GetDimensionName(i)))
{
return false;
}
}
return true;
}
public int GetHashCode(IMetricMetadata obj)
{
return StringComparer.OrdinalIgnoreCase.GetHashCode(obj.MetricName) ^
StringComparer.OrdinalIgnoreCase.GetHashCode(obj.MetricNamespace);
}
}
private sealed class NoCloseBinaryWriter : BinaryWriter
{
public NoCloseBinaryWriter(Stream stream, Encoding encoding)
: base(stream, encoding)
{
}
protected override void Dispose(bool disposing)
{
this.BaseStream.Flush();
base.Dispose(false);
}
}
}
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="MetricConfigurationManager.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client
{
using System;
using System.Net.Http;
using System.Threading.Tasks;
using Microsoft.Cloud.Metrics.Client.Utility;
using Microsoft.Online.Metrics.Serialization.Configuration;
using Newtonsoft.Json;
/// <summary>
/// The class to read and write metric configurations.
/// </summary>
internal sealed class MetricConfigurationManager
{
#pragma warning disable SA1401 // Fields must be private
/// <summary>
/// The relative URL for metrics configuration
/// </summary>
public readonly string ConfigRelativeUrl;
#pragma warning restore SA1401 // Fields must be private
/// <summary>
/// The HTTP client instance.
/// </summary>
private readonly HttpClient httpClient;
/// <summary>
/// The connection information
/// </summary>
private readonly ConnectionInfo connectionInfo;
/// <summary>
/// Initializes a new instance of the <see cref="MetricConfigurationManager" /> class.
/// </summary>
/// <param name="connectionInfo">The connection information.</param>
public MetricConfigurationManager(ConnectionInfo connectionInfo)
{
this.connectionInfo = connectionInfo;
this.ConfigRelativeUrl = this.connectionInfo.GetAuthRelativeUrl(MetricsServerRelativeUris.ConfigRelativeUrl);
this.httpClient = HttpClientHelper.CreateHttpClientWithAuthInfo(connectionInfo);
}
/// <summary>
/// Gets the metric configuration.
/// </summary>
/// <param name="metricIdentifier">The metric identifier.</param>
/// <returns>The metric configuration.</returns>
public async Task<MetricConfigurationV2> Get(MetricIdentifier metricIdentifier)
{
string url = string.Format(
"{0}{1}/monitoringAccount/{2}/metricNamespace/{3}/metric/{4}",
this.connectionInfo.GetEndpoint(metricIdentifier.MonitoringAccount),
this.ConfigRelativeUrl,
metricIdentifier.MonitoringAccount,
SpecialCharsHelper.EscapeTwice(metricIdentifier.MetricNamespace),
SpecialCharsHelper.EscapeTwice(metricIdentifier.MetricName));
var response = await HttpClientHelper.GetResponse(
new Uri(url),
HttpMethod.Get,
this.httpClient,
metricIdentifier.MonitoringAccount,
this.ConfigRelativeUrl).ConfigureAwait(false);
return JsonConvert.DeserializeObject<MetricConfigurationV2>(response.Item1);
}
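// Illustrative usage sketch (not part of the original source); the account, namespace and metric
// names are hypothetical, and connectionInfo is assumed to be created elsewhere.
//
// var manager = new MetricConfigurationManager(connectionInfo);
// MetricConfigurationV2 config = await manager.Get(
//     new MetricIdentifier("MyAccount", "MyNamespace", "MyMetric"));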
/// <summary>
/// Deletes the metric configuration.
/// </summary>
/// <param name="metricIdentifier">The metric identifier.</param>
/// <returns>A task representing the deletion of the configuration.</returns>
/// <remarks>
/// It deletes only the metric configuration, not the metric data.
/// </remarks>
public async Task Delete(MetricIdentifier metricIdentifier)
{
string url = string.Format(
"{0}{1}/monitoringAccount/{2}/metricNamespace/{3}/metric/{4}",
this.connectionInfo.GetEndpoint(metricIdentifier.MonitoringAccount),
this.ConfigRelativeUrl,
metricIdentifier.MonitoringAccount,
SpecialCharsHelper.EscapeTwice(metricIdentifier.MetricNamespace),
SpecialCharsHelper.EscapeTwice(metricIdentifier.MetricName));
await HttpClientHelper.GetResponse(
new Uri(url),
HttpMethod.Delete,
this.httpClient,
metricIdentifier.MonitoringAccount,
this.ConfigRelativeUrl).ConfigureAwait(false);
}
/// <summary>
/// Posts the metric configuration.
/// </summary>
/// <param name="configuration">The metric configuration.</param>
/// <returns>A task representing the update of the configuration.</returns>
public async Task Post(MetricConfigurationV2 configuration)
{
string url = string.Format(
"{0}{1}/monitoringAccount/{2}/metricNamespace/{3}/metric/{4}",
this.connectionInfo.GetEndpoint(configuration.MonitoringAccount),
this.ConfigRelativeUrl,
configuration.MonitoringAccount,
SpecialCharsHelper.EscapeTwice(configuration.MetricNamespace),
SpecialCharsHelper.EscapeTwice(configuration.MetricName));
await HttpClientHelper.GetResponse(
new Uri(url),
HttpMethod.Post,
this.httpClient,
configuration.MonitoringAccount,
this.ConfigRelativeUrl,
configuration).ConfigureAwait(false);
}
}
}<file_sep>// -----------------------------------------------------------------------
// <copyright file="ConsoleLogEngine.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// -----------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Logging
{
using System;
using System.Globalization;
/// <summary>
/// Log engine that sends its output to console.
/// </summary>
internal sealed class ConsoleLogEngine : ILogEngine
{
/// <summary>
/// Logs the given data according to the engine implementation.
/// </summary>
/// <param name="level">
/// Level of the log statement.
/// </param>
/// <param name="logId">
/// Log identification for classifying log statements.
/// </param>
/// <param name="tag">
/// Extra string that allows another level of classification under the log id.
/// </param>
/// <param name="format">
/// Message to be logged, it can be a format message.
/// </param>
/// <param name="objectParams">
/// Optional, any parameter to be used to build the formatted message string.
/// </param>
public void Log(LoggerLevel level, object logId, string tag, string format, params object[] objectParams)
{
if (level == LoggerLevel.CustomerFacingInfo)
{
var customerFacingLog = string.Format(
CultureInfo.InvariantCulture,
"[{0}] {1}",
DateTime.UtcNow.ToString("hh:mm:ss"),
format);
Console.BackgroundColor = ConsoleColor.Black;
if (objectParams == null || objectParams.Length == 0)
{
Console.WriteLine(customerFacingLog);
}
else
{
Console.WriteLine(customerFacingLog, objectParams);
}
}
else
{
var s = string.Format(
CultureInfo.InvariantCulture,
"UTC=[{0}] Level=[{1}] LogId=[{2}] Tag=[{3}] {4}",
DateTime.UtcNow.ToString("yyyy-MM-ddTHH:mm:ss.fff"),
level,
logId,
tag,
format);
if (level <= LoggerLevel.Warning)
{
Console.BackgroundColor = ConsoleColor.Blue;
}
else
{
Console.BackgroundColor = ConsoleColor.Black;
}
if (objectParams == null || objectParams.Length == 0)
{
Console.WriteLine(s);
}
else
{
Console.WriteLine(s, objectParams);
}
Console.BackgroundColor = ConsoleColor.Black;
}
}
/// <summary>
/// Checks if a log statement with the given parameters will be actually logged or
/// not. Useful to avoid expensive operations for log statements that are going to
/// be dropped by the log engine.
/// </summary>
/// <param name="level">
/// Level of the log statement.
/// </param>
/// <param name="logId">
/// Log identification for classifying log statements.
/// </param>
/// <param name="tag">
/// Extra string that allows another level of classification under the log id.
/// </param>
/// <returns>
/// True if the statement is going to be logged, false otherwise.
/// </returns>
public bool IsLogged(LoggerLevel level, object logId, string tag)
{
return true;
}
/// <summary>
/// Disposes resources used by the object.
/// </summary>
public void Dispose()
{
}
}
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="ApPkiAcl.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Configuration
{
using System;
using Newtonsoft.Json;
/// <summary>
/// AP PKI ACL with access to MDM.
/// </summary>
/// <seealso cref="Microsoft.Cloud.Metrics.Client.Configuration.IPermissionV2" />
public sealed class ApPkiAcl : IPermissionV2
{
/// <summary>
/// Initializes a new instance of the <see cref="ApPkiAcl"/> class.
/// </summary>
/// <param name="identity">The AP PKI ACL.</param>
/// <param name="roleConfiguration">The role granted to this ACL.</param>
/// <param name="description">The description of ACL. Default value is null.</param>
[JsonConstructor]
public ApPkiAcl(string identity, RoleConfiguration roleConfiguration, string description = null)
{
if (string.IsNullOrWhiteSpace(identity))
{
throw new ArgumentNullException(nameof(identity));
}
this.Identity = identity;
this.Description = description;
this.RoleConfiguration = roleConfiguration;
}
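// Illustrative usage sketch (not part of the original source); the identity string and description
// below are hypothetical.
//
// var acl = new ApPkiAcl("my-service-ap-pki-identity", RoleConfiguration.ReadOnly, "Read-only AP PKI access");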
/// <summary>
/// The identity to grant permission.
/// </summary>
public string Identity { get; }
/// <inheritdoc />
[JsonProperty(DefaultValueHandling = DefaultValueHandling.Ignore)]
public string Description { get; }
/// <summary>
/// The level of access to be granted to this identity.
/// </summary>
public RoleConfiguration RoleConfiguration { get; set; }
}
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="ConfigurationUpdateResult.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Configuration
{
using Newtonsoft.Json;
/// <summary>
/// A class to represent the configuration update result.
/// </summary>
public sealed class ConfigurationUpdateResult : IConfigurationUpdateResult
{
/// <summary>
/// Initializes a new instance of the <see cref="ConfigurationUpdateResult"/> class.
/// </summary>
/// <param name="monitoringAccount">Monitoring account name.</param>
/// <param name="success">Success result of operation.</param>
/// <param name="message">Exception details if any.</param>
[JsonConstructor]
public ConfigurationUpdateResult(
string monitoringAccount,
bool success,
string message)
{
this.MonitoringAccount = monitoringAccount;
this.Success = success;
this.Message = message;
}
/// <summary>
/// Monitoring account on which configuration was updated.
/// </summary>
public string MonitoringAccount { get; }
/// <summary>
/// True if configuration is updated successfully. False, otherwise.
/// </summary>
public bool Success { get; set; }
/// <summary>
/// Exception details in case of failures.
/// </summary>
public string Message { get; set; }
}
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="MonitoringAccount.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Configuration
{
using System;
using System.Collections.Generic;
using System.Linq;
using Newtonsoft.Json;
/// <summary>
/// The monitoring account configuration object used for JSON serialization and deserialization.
/// </summary>
public sealed class MonitoringAccount : IMonitoringAccount
{
/// <summary>
/// List of permissions granted access to this account.
/// </summary>
private readonly IList<IPermissionV2> permissions;
/// <summary>
/// List of mirror monitoring accounts.
/// </summary>
private readonly IList<string> mirrorMonitoringAccountList;
/// <summary>
/// Initializes a new instance of the <see cref="MonitoringAccount"/> class.
/// </summary>
/// <param name="name">The name of the account.</param>
/// <param name="description">The description of the account.</param>
/// <param name="permissionsV2">The permissions associated with the account.</param>
public MonitoringAccount(
string name,
string description,
IEnumerable<IPermissionV2> permissionsV2)
: this(name, null, description, default(DateTime), permissionsV2, null, 1, null, null)
{
}
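// Illustrative usage sketch (not part of the original source); the account name, description and
// ACL identity are hypothetical.
//
// var account = new MonitoringAccount(
//     "MyMonitoringAccount",
//     "Test account for the metrics client",
//     new IPermissionV2[] { new ApPkiAcl("my-service-identity", RoleConfiguration.Administrator) });
// account.AddMirrorMonitoringAccount("MyMirrorAccount");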
/// <summary>
/// Initializes a new instance of the <see cref="MonitoringAccount" /> class.
/// </summary>
/// <param name="name">The name of the account.</param>
/// <param name="displayName">The display name of the account.</param>
/// <param name="description">The description of the account.</param>
/// <param name="lastUpdatedTimeUtc">The last updated time.</param>
/// <param name="permissionsV2">The permissions associated with the account.</param>
/// <param name="lastUpdatedBy">The identity that last updated the account.</param>
/// <param name="version">The version of the account.</param>
/// <param name="mirrorMonitoringAccountList">The list of mirror monitoring accounts.</param>
/// <param name="homeStampHostName">Name of the home stamp host for this account.</param>
[JsonConstructor]
internal MonitoringAccount(
string name,
string displayName,
string description,
DateTime lastUpdatedTimeUtc,
IEnumerable<IPermissionV2> permissionsV2,
string lastUpdatedBy,
uint version,
IEnumerable<string> mirrorMonitoringAccountList,
string homeStampHostName)
{
if (permissionsV2 == null)
{
throw new ArgumentNullException(nameof(permissionsV2));
}
this.Name = name;
this.DisplayName = displayName;
this.Description = description;
this.LastUpdatedTimeUtc = lastUpdatedTimeUtc;
this.LastUpdatedBy = lastUpdatedBy;
this.Version = version;
this.permissions = permissionsV2.ToList();
this.HomeStampHostName = homeStampHostName;
this.mirrorMonitoringAccountList = mirrorMonitoringAccountList?.ToList() ?? new List<string>();
}
/// <summary>
/// The name of the monitoring account.
/// </summary>
public string Name { get; }
/// <summary>
/// The display name of the monitoring account.
/// </summary>
public string DisplayName { get; set; }
/// <summary>
/// The description of the monitoring account.
/// </summary>
public string Description { get; set; }
/// <summary>
/// The host name of the MDM stamp that currently owns this account.
/// </summary>
public string HomeStampHostName { get; }
/// <summary>
/// The list of entities that have access to this MDM account and their roles.
/// </summary>
[JsonProperty(PropertyName = "PermissionsV2")]
public IEnumerable<IPermissionV2> Permissions
{
get { return this.permissions; }
}
/// <summary>
/// The time the account was last updated.
/// </summary>
public DateTime LastUpdatedTimeUtc { get; }
/// <summary>
/// The identity that updated the account most recently.
/// </summary>
public string LastUpdatedBy { get; }
/// <summary>
/// The version of the monitoring account configuration.
/// </summary>
public uint Version { get; }
/// <inheritdoc />
public IEnumerable<string> MirrorMonitoringAccountList
{
get
{
return this.mirrorMonitoringAccountList;
}
}
/// <summary>
/// Gets or sets the value for the maximum age, as a <see cref="TimeSpan"/>, of metrics to be accepted.
/// </summary>
[JsonProperty(DefaultValueHandling = DefaultValueHandling.Ignore)]
internal TimeSpan MaxMetricAge { get; set; }
/// <summary>
/// Gets or sets a flag indicating whether to prefer to create new pre-aggregates on metrics store.
/// </summary>
[JsonProperty(DefaultValueHandling = DefaultValueHandling.Ignore)]
internal bool? PreferNewPreaggregateOnMetricsStore { get; set; }
/// <summary>
/// Adds a permission to the account.
/// </summary>
/// <param name="permission">Permission to add.</param>
public void AddPermission(IPermissionV2 permission)
{
this.permissions.Add(permission);
}
/// <summary>
/// Remove permission from the account.
/// </summary>
/// <param name="permission">Permission to remove.</param>
public void RemovePermission(IPermissionV2 permission)
{
this.permissions.Remove(permission);
}
/// <inheritdoc />
public void AddMirrorMonitoringAccount(string monitoringAccountName)
{
this.mirrorMonitoringAccountList.Add(monitoringAccountName);
}
/// <inheritdoc />
public void RemoveMirrorMonitoringAccount(string monitoringAccountName)
{
this.mirrorMonitoringAccountList.Remove(monitoringAccountName);
}
}
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="RoleConfiguration.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Configuration
{
/// <summary>
/// The configuration for the MDM role which grants certain permissions to entities using the system.
/// </summary>
public sealed class RoleConfiguration
{
/// <summary>
/// This role is meant for a certificate only since users all have read access.
/// </summary>
public static readonly RoleConfiguration ReadOnly = new RoleConfiguration("ReadOnly");
/// <summary>
/// This role is meant for a certificate primarily but does not need to be restricted to such.
/// Allows a certificate to read or publish data only but not be used for account settings modification.
/// </summary>
public static readonly RoleConfiguration MetricPublisher = new RoleConfiguration("MetricPublisher");
/// <summary>
/// This role has the ability to modify and create dashboards within an account.
/// </summary>
public static readonly RoleConfiguration DashboardEditor = new RoleConfiguration("DashboardEditor");
/// <summary>
/// This role has the ability to modify and create monitors within an account.
/// </summary>
public static readonly RoleConfiguration MonitorEditor = new RoleConfiguration("MonitorEditor");
/// <summary>
/// This role has the ability to modify and create monitors/metrics within an account.
/// </summary>
public static readonly RoleConfiguration MetricAndMonitorEditor = new RoleConfiguration("MetricAndMonitorEditor");
/// <summary>
/// This role has the ability to modify metric, monitor or health configuration within an account.
/// </summary>
public static readonly RoleConfiguration ConfigurationEditor = new RoleConfiguration("ConfigurationEditor");
/// <summary>
/// Full access to modify configuration, account settings and dashboards.
/// </summary>
public static readonly RoleConfiguration Administrator = new RoleConfiguration("Administrator");
/// <summary>
/// Initializes a new instance of the <see cref="RoleConfiguration"/> class.
/// </summary>
/// <param name="name">The name.</param>
public RoleConfiguration(string name)
{
this.Name = name;
}
/// <summary>
/// Gets the name of the role.
/// </summary>
public string Name { get; }
}
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="MetricEnrichmentTransformationType.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.MetricEnrichmentRuleManagement
{
/// <summary>
/// Represents the metric enrichment transformation type.
/// </summary>
public enum MetricEnrichmentTransformationType
{
/// <summary>
/// Adds new dimensions to the event.
/// </summary>
Add,
/// <summary>
/// Replaces existing dimension in the event.
/// </summary>
Replace,
/// <summary>
/// Drops the event.
/// </summary>
Drop
}
}
<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="QueryResultsList.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Query
{
using System;
using System.Collections.Generic;
using Newtonsoft.Json;
using Utility;
/// <summary>
/// Represents a list of <see cref="IQueryResult"/> objects.
/// </summary>
public sealed class QueryResultsList
{
/// <summary>
/// Initializes a new instance of the <see cref="QueryResultsList"/> class.
/// </summary>
/// <param name="startTimeUtc">The start time.</param>
/// <param name="endTimeUtc">The end time.</param>
/// <param name="timeResolutionInMilliseconds">The time resolution in milliseconds.</param>
/// <param name="results">The results.</param>
[JsonConstructor]
internal QueryResultsList(long startTimeUtc, long endTimeUtc, long timeResolutionInMilliseconds, IReadOnlyList<QueryResult> results)
{
this.StartTimeUtc = UnixEpochHelper.FromMillis(startTimeUtc);
this.EndTimeUtc = UnixEpochHelper.FromMillis(endTimeUtc);
this.TimeResolutionInMilliseconds = timeResolutionInMilliseconds;
this.Results = results;
}
/// <summary>
/// Gets the end time in UTC for the query results.
/// </summary>
public DateTime EndTimeUtc { get; private set; }
/// <summary>
/// Gets the start time in UTC for the query results.
/// </summary>
public DateTime StartTimeUtc { get; private set; }
/// <summary>
/// Gets the time resolution in milliseconds for the query results.
/// </summary>
public long TimeResolutionInMilliseconds { get; private set; }
/// <summary>
/// Gets the query results. Each result represents a single time series whose start time, end time, and
/// time resolution are given by the members of this object.
/// </summary>
public IReadOnlyList<IQueryResult> Results { get; private set; }
}
}
<file_sep>// <copyright file="MetricSerializationException.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//
// <author email="selavrin">
// <NAME>
// </author>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Online.Metrics.Serialization
{
using System;
/// <summary>
/// A general exception used to report about metric serialization or deserialization failures.
/// </summary>
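/// <example>
/// A minimal sketch of handling this exception; <c>DeserializePacket()</c> is a hypothetical placeholder
/// for whatever call performs metric serialization or deserialization.
/// <code>
/// try
/// {
///     DeserializePacket();
/// }
/// catch (MetricSerializationException ex) when (ex.IsInvalidData)
/// {
///     // The packet content itself is invalid; drop it instead of retrying.
/// }
/// </code>
/// </example>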
[Serializable]
public class MetricSerializationException : Exception
{
/// <summary>
/// Initializes a new instance of the <see cref="MetricSerializationException"/> class.
/// </summary>
/// <param name="message">Message describing exception situation.</param>
/// <param name="innerException">Inner exception which caused exception situation.</param>
/// <param name="isInvalidData">True if the exception is because of invalid data in the packet.</param>
public MetricSerializationException(string message, Exception innerException, bool isInvalidData = false)
: base(message, innerException)
{
this.IsInvalidData = isInvalidData;
}
/// <summary>
/// Gets a value indicating whether the failure is because of invalid data in the packet.
/// </summary>
public bool IsInvalidData { get; }
}
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="IMetricReader.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Metrics
{
using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Threading.Tasks;
using Microsoft.Cloud.Metrics.Client.Query;
using Microsoft.Online.Metrics.Serialization.Configuration;
/// <summary>
/// The interface for reading metric data.
/// </summary>
public interface IMetricReader
{
/// <summary>
/// Gets the time series.
/// </summary>
/// <param name="startTimeUtc">The start time UTC.</param>
/// <param name="endTimeUtc">The end time UTC.</param>
/// <param name="samplingType">The sampling type.</param>
/// <param name="definition">The time series definition.</param>
/// <returns>
/// The time series for the given <paramref name="definition"/>.
/// </returns>
/// <exception cref="MetricsClientException">
/// This exception is thrown on all failures in communication.
/// One should look at inner exception for more details.
/// </exception>
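/// <example>
/// A minimal sketch of reading one series, assuming the <see cref="IMetricReader"/> instance is obtained
/// elsewhere and that "Average" is a sampling type configured for the metric.
/// <code>
/// static async Task ReadOneSeriesAsync(IMetricReader reader, MetricIdentifier id, DateTime startUtc, DateTime endUtc)
/// {
///     // Discover a concrete dimension combination for the metric (no filters applied).
///     var definitions = await reader.GetKnownTimeSeriesDefinitionsAsync(id);
///
///     // Read the series for the first known combination.
///     var series = await reader.GetTimeSeriesAsync(startUtc, endUtc, new SamplingType("Average"), definitions[0]);
/// }
/// </code>
/// </example>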
Task<TimeSeries<MetricIdentifier, double?>> GetTimeSeriesAsync(
DateTime startTimeUtc,
DateTime endTimeUtc,
SamplingType samplingType,
TimeSeriesDefinition<MetricIdentifier> definition);
/// <summary>
/// Gets the time series.
/// </summary>
/// <param name="startTimeUtc">The start time UTC.</param>
/// <param name="endTimeUtc">The end time UTC.</param>
/// <param name="samplingType">The sampling type.</param>
/// <param name="seriesResolutionInMinutes">The resolution window used to reduce the resolution of the returned series.</param>
/// <param name="definition">The time series definition.</param>
/// <returns>
/// The time series for the given <paramref name="definition"/>.
/// </returns>
/// <remarks>This API uses <see cref="AggregationType.Automatic"/> by default and other overloads are available for specific <see cref="AggregationType"/>.</remarks>
/// <exception cref="MetricsClientException">
/// This exception is thrown on all failures in communication.
/// One should look at inner exception for more details.
/// </exception>
Task<TimeSeries<MetricIdentifier, double?>> GetTimeSeriesAsync(
DateTime startTimeUtc,
DateTime endTimeUtc,
SamplingType samplingType,
int seriesResolutionInMinutes,
TimeSeriesDefinition<MetricIdentifier> definition);
/// <summary>
/// Gets time series with multiple sampling types.
/// </summary>
/// <param name="startTimeUtc">The start time UTC.</param>
/// <param name="endTimeUtc">The end time UTC.</param>
/// <param name="samplingTypes">The sampling types.</param>
/// <param name="definition">The time series definition.</param>
/// <param name="seriesResolutionInMinutes">The resolution window used to reduce the resolution of the returned series.</param>
/// <param name="aggregationType">The aggregation function used to reduce the resolution of the returned series.</param>
/// <returns>
/// The time series for the given <paramref name="definition" />.
/// </returns>
Task<TimeSeries<MetricIdentifier, double?>> GetTimeSeriesAsync(
DateTime startTimeUtc,
DateTime endTimeUtc,
SamplingType[] samplingTypes,
TimeSeriesDefinition<MetricIdentifier> definition,
int seriesResolutionInMinutes = 1,
AggregationType aggregationType = AggregationType.Automatic);
/// <summary>
/// Gets a list of the time series.
/// </summary>
/// <param name="startTimeUtc">The start time UTC.</param>
/// <param name="endTimeUtc">The end time UTC.</param>
/// <param name="samplingType">The sampling type.</param>
/// <param name="definitions">The time series definitions.</param>
/// <returns>
/// The time series for the given <paramref name="definitions"/>.
/// </returns>
/// <exception cref="MetricsClientException">
/// This exception is thrown on all failures in communication.
/// One should look at inner exception for more details.
/// </exception>
Task<IReadOnlyList<TimeSeries<MetricIdentifier, double?>>> GetMultipleTimeSeriesAsync(
DateTime startTimeUtc,
DateTime endTimeUtc,
SamplingType samplingType,
params TimeSeriesDefinition<MetricIdentifier>[] definitions);
/// <summary>
/// Gets a list of the time series.
/// </summary>
/// <param name="startTimeUtc">The start time UTC.</param>
/// <param name="endTimeUtc">The end time UTC.</param>
/// <param name="samplingType">The sampling type.</param>
/// <param name="seriesResolutionInMinutes">The resolution window used to reduce the resolution of the returned series.</param>
/// <param name="definitions">The time series definitions.</param>
/// <returns>
/// The time series for the given <paramref name="definitions"/>.
/// </returns>
/// <remarks>This API uses <see cref="AggregationType.Automatic"/> by default and other overloads are available for specific <see cref="AggregationType"/>.</remarks>
/// <exception cref="MetricsClientException">
/// This exception is thrown on all failures in communication.
/// One should look at inner exception for more details.
/// </exception>
Task<IReadOnlyList<TimeSeries<MetricIdentifier, double?>>> GetMultipleTimeSeriesAsync(
DateTime startTimeUtc,
DateTime endTimeUtc,
SamplingType samplingType,
int seriesResolutionInMinutes,
params TimeSeriesDefinition<MetricIdentifier>[] definitions);
/// <summary>
/// Gets a list of the time series.
/// </summary>
/// <param name="startTimeUtc">The start time UTC.</param>
/// <param name="endTimeUtc">The end time UTC.</param>
/// <param name="samplingType">The sampling type.</param>
/// <param name="definitions">The time series definitions.</param>
/// <returns>
/// The time series for the given <paramref name="definitions"/>.
/// </returns>
/// <exception cref="MetricsClientException">
/// This exception is thrown on all failures in communication.
/// One should look at inner exception for more details.
/// </exception>
Task<IReadOnlyList<TimeSeries<MetricIdentifier, double?>>> GetMultipleTimeSeriesAsync(
DateTime startTimeUtc,
DateTime endTimeUtc,
SamplingType samplingType,
IEnumerable<TimeSeriesDefinition<MetricIdentifier>> definitions);
/// <summary>
/// Gets a list of the time series.
/// </summary>
/// <param name="startTimeUtc">The start time UTC.</param>
/// <param name="endTimeUtc">The end time UTC.</param>
/// <param name="samplingType">The sampling type.</param>
/// <param name="seriesResolutionInMinutes">The resolution window used to reduce the resolution of the returned series.</param>
/// <param name="definitions">The time series definitions.</param>
/// <remarks>This API uses <see cref="AggregationType.Automatic"/> by default and other overloads are available for specific <see cref="AggregationType"/>.</remarks>
/// <returns>
/// The time series for the given <paramref name="definitions"/>.
/// </returns>
/// <exception cref="MetricsClientException">
/// This exception is thrown on all failures in communication.
/// One should look at inner exception for more details.
/// </exception>
Task<IReadOnlyList<TimeSeries<MetricIdentifier, double?>>> GetMultipleTimeSeriesAsync(
DateTime startTimeUtc,
DateTime endTimeUtc,
SamplingType samplingType,
int seriesResolutionInMinutes,
IEnumerable<TimeSeriesDefinition<MetricIdentifier>> definitions);
/// <summary>
/// Gets a list of the time series, each with multiple sampling types.
/// </summary>
/// <param name="startTimeUtc">The start time UTC.</param>
/// <param name="endTimeUtc">The end time UTC.</param>
/// <param name="samplingTypes">The sampling types.</param>
/// <param name="definitions">The time series definitions.</param>
/// <param name="seriesResolutionInMinutes">The resolution window used to reduce the resolution of the returned series.</param>
/// <param name="aggregationType">The aggregation function used to reduce the resolution of the returned series.</param>
/// <returns>
/// The time series for the given <paramref name="definitions"/>.
/// </returns>
Task<IReadOnlyList<TimeSeries<MetricIdentifier, double?>>> GetMultipleTimeSeriesAsync(
DateTime startTimeUtc,
DateTime endTimeUtc,
SamplingType[] samplingTypes,
IEnumerable<TimeSeriesDefinition<MetricIdentifier>> definitions,
int seriesResolutionInMinutes = 1,
AggregationType aggregationType = AggregationType.Automatic);
/// <summary>
/// Gets a list of the time series.
/// </summary>
/// <param name="definitions">The time series definitions.</param>
/// <returns>
/// The time series for the given <paramref name="definitions"/>.
/// </returns>
Task<IReadOnlyList<TimeSeries<MetricIdentifier, double?>>> GetMultipleTimeSeriesAsync(IList<TimeSeriesDefinition<MetricIdentifier>> definitions);
/// <summary>
/// Gets the list of namespaces for the <paramref name="monitoringAccount"/>.
/// </summary>
/// <param name="monitoringAccount">The monitoring account.</param>
/// <returns>The list of namespaces for the <paramref name="monitoringAccount"/>.</returns>
/// <exception cref="MetricsClientException">
/// This exception is thrown on all failures in communication.
/// One should look at inner exception for more details.
/// </exception>
Task<IReadOnlyList<string>> GetNamespacesAsync(string monitoringAccount);
/// <summary>
/// Gets the list of metric names for the <paramref name="monitoringAccount" /> and <paramref name="metricNamespace" />.
/// </summary>
/// <param name="monitoringAccount">The monitoring account.</param>
/// <param name="metricNamespace">The metric namespace.</param>
/// <returns>
/// The list of metric names for the <paramref name="monitoringAccount" /> and <paramref name="metricNamespace" />.
/// </returns>
/// <exception cref="MetricsClientException">
/// This exception is thrown on all failures in communication.
/// One should look at inner exception for more details.
/// </exception>
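/// <example>
/// A minimal sketch of enumerating the metrics in an account by combining
/// <see cref="GetNamespacesAsync"/> and <see cref="GetMetricNamesAsync"/>.
/// <code>
/// static async Task ListMetricsAsync(IMetricReader reader, string monitoringAccount)
/// {
///     foreach (var metricNamespace in await reader.GetNamespacesAsync(monitoringAccount))
///     {
///         foreach (var metricName in await reader.GetMetricNamesAsync(monitoringAccount, metricNamespace))
///         {
///             Console.WriteLine($"{metricNamespace} / {metricName}");
///         }
///     }
/// }
/// </code>
/// </example>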
Task<IReadOnlyList<string>> GetMetricNamesAsync(string monitoringAccount, string metricNamespace);
/// <summary>
/// Gets the list of dimension names for the <paramref name="metricId" />.
/// </summary>
/// <param name="metricId">The metric identifier.</param>
/// <returns>
/// The list of dimension names for the <paramref name="metricId" />.
/// </returns>
Task<IReadOnlyList<string>> GetDimensionNamesAsync(MetricIdentifier metricId);
/// <summary>
/// Gets the list of pre-aggregate configurations for the <paramref name="metricId" />.
/// </summary>
/// <param name="metricId">The metric identifier.</param>
/// <returns>
/// The list of pre-aggregate configurations for the <paramref name="metricId" />.
/// </returns>
Task<IReadOnlyList<PreAggregateConfiguration>> GetPreAggregateConfigurationsAsync(MetricIdentifier metricId);
/// <summary>
/// Gets the known dimension combinations.
/// </summary>
/// <param name="metricId">The metric identifier.</param>
/// <param name="dimensionFilters">The dimension filters.</param>
/// <returns>Time series definitions with known dimension combinations.</returns>
/// <exception cref="MetricsClientException">
/// This exception is thrown on all failures in communication.
/// One should look at inner exception for more details.
/// </exception>
Task<IReadOnlyList<TimeSeriesDefinition<MetricIdentifier>>> GetKnownTimeSeriesDefinitionsAsync(
MetricIdentifier metricId,
params DimensionFilter[] dimensionFilters);
/// <summary>
/// Gets the known dimension combinations.
/// </summary>
/// <param name="metricId">The metric identifier.</param>
/// <param name="dimensionFilters">The dimension filters.</param>
/// <returns>Time series definitions with known dimension combinations.</returns>
/// <exception cref="MetricsClientException">
/// This exception is thrown on all failures in communication.
/// One should look at inner exception for more details.
/// </exception>
Task<IReadOnlyList<TimeSeriesDefinition<MetricIdentifier>>> GetKnownTimeSeriesDefinitionsAsync(
MetricIdentifier metricId,
IEnumerable<DimensionFilter> dimensionFilters);
/// <summary>
/// Gets the dimension values for <paramref name="dimensionName"/> satisfying the <paramref name="dimensionFilters"/>
/// and the time range (<paramref name="startTimeUtc"/>, <paramref name="endTimeUtc"/>).
/// </summary>
/// <remarks>
/// Time range resolution is day.
/// </remarks>
/// <param name="metricId">The metric identifier.</param>
/// <param name="dimensionFilters">
/// The dimension filters representing the pre-aggregate dimensions.
/// Create an empty include filter for a dimension with no filter values.
/// The requested dimension should also be part of this list, with an empty filter.
/// </param>
/// <param name="dimensionName">Name of the dimension for which values are requested.</param>
/// <param name="startTimeUtc">Start time for evaluating dimension values.</param>
/// <param name="endTimeUtc">End time for evaluating dimension values.</param>
/// <returns>Dimension values for <paramref name="dimensionName"/>.</returns>
Task<IReadOnlyList<string>> GetDimensionValuesAsync(
MetricIdentifier metricId,
List<DimensionFilter> dimensionFilters,
string dimensionName,
DateTime startTimeUtc,
DateTime endTimeUtc);
/// <summary>
/// Gets the known dimension combinations.
/// </summary>
/// <param name="metricId">The metric identifier.</param>
/// <param name="dimensionFilters">The dimension filters.</param>
/// <param name="startTimeUtc">Start time for dimension values.</param>
/// <param name="endTimeUtc">End time for dimension values.</param>
/// <param name="newCombinationsOnly">
/// If true, only combinations that the hinting system added into memory in this session after <paramref name="startTimeUtc"/> are returned.
/// This flag does *not* guarantee that only new combinations will be returned;
/// it is merely a hint to the hinting system to prefer new combinations in the given time range.
/// </param>
/// <returns>Time series definitions with known dimension combinations.</returns>
/// <exception cref="MetricsClientException">
/// This exception is thrown on all failures in communication.
/// One should look at inner exception for more details.
/// </exception>
Task<IReadOnlyList<TimeSeriesDefinition<MetricIdentifier>>> GetKnownTimeSeriesDefinitionsAsync(
MetricIdentifier metricId,
IEnumerable<DimensionFilter> dimensionFilters,
DateTime startTimeUtc,
DateTime endTimeUtc,
bool newCombinationsOnly = false);
/// <summary>
/// Gets the filtered dimension values asynchronously.
/// </summary>
/// <param name="metricId">The metric identifier.</param>
/// <param name="dimensionFilters">The dimension filters.</param>
/// <param name="startTimeUtc">The start time UTC.</param>
/// <param name="endTimeUtc">The end time UTC.</param>
/// <param name="samplingType">The sampling type.</param>
/// <param name="reducer">The reducing function to apply to the time series.</param>
/// <param name="queryFilter">The query filter.</param>
/// <param name="includeSeries">if set to <c>true</c> include series values.</param>
/// <param name="selectionClause">Reduce result to top N results of the query.</param>
/// <param name="aggregationType">Aggregation function to use when reducing the resolution of the returned series.</param>
/// <param name="seriesResolutionInMinutes">Reduce size of included series array by adjusting the resolution.</param>
/// <returns>A list of filtered dimension values.</returns>
[Obsolete]
Task<IReadOnlyList<IQueryResult>> GetFilteredDimensionValuesAsync(
MetricIdentifier metricId,
IEnumerable<DimensionFilter> dimensionFilters,
DateTime startTimeUtc,
DateTime endTimeUtc,
SamplingType samplingType,
Reducer reducer,
QueryFilter queryFilter,
bool includeSeries,
SelectionClause selectionClause = null,
AggregationType aggregationType = AggregationType.Sum,
long seriesResolutionInMinutes = 1);
/// <summary>
/// Gets the filtered dimension values asynchronously.
/// </summary>
/// <param name="metricId">The metric identifier.</param>
/// <param name="dimensionFilters">The dimension filters.</param>
/// <param name="startTimeUtc">The start time UTC.</param>
/// <param name="endTimeUtc">The end time UTC.</param>
/// <param name="samplingType">The sampling type.</param>
/// <param name="reducer">The reducing function to apply to the time series.</param>
/// <param name="queryFilter">The query filter.</param>
/// <param name="includeSeries">if set to <c>true</c> include series values.</param>
/// <param name="selectionClause">Reduce result to top N results of the query.</param>
/// <param name="aggregationType">Aggregation function to use when reducing the resolution of the returned series.</param>
/// <param name="seriesResolutionInMinutes">Reduce size of included series array by adjusting the resolution.</param>
/// <returns>A list of filtered dimension values.</returns>
[Obsolete]
Task<QueryResultsList> GetFilteredDimensionValuesAsyncV2(
MetricIdentifier metricId,
IEnumerable<DimensionFilter> dimensionFilters,
DateTime startTimeUtc,
DateTime endTimeUtc,
SamplingType samplingType,
Reducer reducer,
QueryFilter queryFilter,
bool includeSeries,
SelectionClause selectionClause = null,
AggregationType aggregationType = AggregationType.Sum,
long seriesResolutionInMinutes = 1);
/// <summary>
/// Gets the time series values that match the filtering criteria.
/// </summary>
/// <param name="metricId">The metric identifier.</param>
/// <param name="dimensionFilters">The dimension filters, used to indicate one or more possible dimension values.</param>
/// <param name="startTimeUtc">Start time for time series.</param>
/// <param name="endTimeUtc">End time for time series.</param>
/// <param name="samplingTypes">The sampling types.</param>
/// <param name="selectionClause">Reduce result to top N results of the query. By default, all results are returned.</param>
/// <param name="aggregationType">Aggregation function to use when reducing the resolution of the returned series. By default, automatic resolution is used (same as Jarvis UI).</param>
/// <param name="seriesResolutionInMinutes">Reduce size of included series array by adjusting the resolution. 1 minute resolution (full resolution in MDM today) by default.</param>
/// <param name="traceId">The trace identifier for the query, used for diagnostic purposes only. If a trace id is not provided, one will be generated.</param>
/// <param name="outputDimensionNames">The output dimension names.</param>
/// <param name="lastValueMode">Indicating if the query should be fulfilled with last value mode. If true, null values in the query range requested will be filled with the last known value.</param>
/// <returns>
/// Time series definitions matching the query criteria.
/// </returns>
[Obsolete]
Task<IQueryResultListV3> GetFilteredDimensionValuesAsyncV3(
MetricIdentifier metricId,
IReadOnlyList<DimensionFilter> dimensionFilters,
DateTime startTimeUtc,
DateTime endTimeUtc,
IReadOnlyList<SamplingType> samplingTypes,
SelectionClauseV3 selectionClause = null,
AggregationType aggregationType = AggregationType.Automatic,
long seriesResolutionInMinutes = 1,
Guid? traceId = null,
IReadOnlyList<string> outputDimensionNames = null,
bool lastValueMode = false);
/// <summary>
/// Gets the time series values that match the filtering criteria.
/// Note: This API is for advanced scenarios only. Use it only when you fetch very large volumes of metrics from
/// multiple stamps in parallel and run into memory-related performance problems.
/// </summary>
/// <param name="metricId">The metric identifier.</param>
/// <param name="dimensionFilters">The dimension filters, used to indicate one or more possible dimension values.</param>
/// <param name="startTimeUtc">Start time for time series.</param>
/// <param name="endTimeUtc">End time for time series.</param>
/// <param name="samplingTypes">The sampling types.</param>
/// <param name="selectionClause">Reduce result to top N results of the query. By default, all results are returned.</param>
/// <param name="aggregationType">Aggregation function to use when reducing the resolution of the returned series. By default, automatic resolution is used (same as Jarvis UI).</param>
/// <param name="seriesResolutionInMinutes">Reduce size of included series array by adjusting the resolution. 1 minute resolution (full resolution in MDM today) by default.</param>
/// <param name="traceId">The trace identifier for the query, used for diagnostic purposes only. If a trace id is not provided, one will be generated.</param>
/// <param name="outputDimensionNames">The output dimension names.</param>
/// <returns>
/// The raw HTTP response containing the streamed time series results.
/// </returns>
Task<HttpResponseMessage> GetTimeSeriesStreamedAsync(
MetricIdentifier metricId,
IReadOnlyList<DimensionFilter> dimensionFilters,
DateTime startTimeUtc,
DateTime endTimeUtc,
IReadOnlyList<SamplingType> samplingTypes,
SelectionClauseV3 selectionClause = null,
AggregationType aggregationType = AggregationType.Automatic,
long seriesResolutionInMinutes = 1,
Guid? traceId = null,
IReadOnlyList<string> outputDimensionNames = null);
/// <summary>
/// Gets the time series values that match the filtering criteria.
/// </summary>
/// <param name="metricId">The metric identifier.</param>
/// <param name="dimensionFilters">The dimension filters, used to indicate one or more possible dimension values.</param>
/// <param name="startTimeUtc">Start time for time series.</param>
/// <param name="endTimeUtc">End time for time series.</param>
/// <param name="samplingTypes">The sampling types.</param>
/// <param name="selectionClause">Reduce result to top N results of the query. By default, all results are returned.</param>
/// <param name="aggregationType">Aggregation function to use when reducing the resolution of the returned series. By default, automatic resolution is used (same as Jarvis UI).</param>
/// <param name="seriesResolutionInMinutes">Reduce size of included series array by adjusting the resolution. 1 minute resolution (full resolution in MDM today) by default.</param>
/// <param name="traceId">The trace identifier for the query, used for diagnostic purposes only. If a trace id is not provided, one will be generated.</param>
/// <param name="outputDimensionNames">The output dimension names.</param>
/// <param name="lastValueMode">Indicating if the query should be fulfilled with last value mode. If true, null values in the query range requested will be filled with the last known value.</param>
/// <returns>
/// Time series definitions matching the query criteria.
/// </returns>
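/// <example>
/// A minimal sketch of a filtered query, assuming the dimension filters are built elsewhere and that
/// "Sum" is a sampling type configured for the metric.
/// <code>
/// static async Task QueryAsync(IMetricReader reader, MetricIdentifier id, DimensionFilter[] filters, DateTime startUtc, DateTime endUtc)
/// {
///     var samplingTypes = new[] { new SamplingType("Sum") };
///
///     // Fill gaps with the last known value; keep the automatic resolution and aggregation defaults.
///     var results = await reader.GetTimeSeriesAsync(id, filters, startUtc, endUtc, samplingTypes, lastValueMode: true);
/// }
/// </code>
/// </example>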
Task<IQueryResultListV3> GetTimeSeriesAsync(
MetricIdentifier metricId,
IReadOnlyList<DimensionFilter> dimensionFilters,
DateTime startTimeUtc,
DateTime endTimeUtc,
IReadOnlyList<SamplingType> samplingTypes,
SelectionClauseV3 selectionClause = null,
AggregationType aggregationType = AggregationType.Automatic,
long seriesResolutionInMinutes = 1,
Guid? traceId = null,
IReadOnlyList<string> outputDimensionNames = null,
bool lastValueMode = false);
/// <summary>
/// Returns all metric definitions for time series satisfying the given set of filters in the given monitoring account.
/// </summary>
/// <remarks>
/// QOS metrics, composite metrics and wild card metrics are not included in the result set.
/// </remarks>
/// <param name="monitoringAccount">Monitoring account name.</param>
/// <param name="dimensionFilters">The dimension filters, used to indicate one or more possible dimension values.</param>
/// <param name="startTimeUtc">Start time for time series.</param>
/// <param name="endTimeUtc">End time for time series.</param>
/// <param name="traceId">The trace identifier for the query, used for diagnostic purposes only. If a trace id is not provided, one will be generated.</param>
/// <returns>
/// All metric definitions that have data for time series keys satisfying the given set of filters.
/// </returns>
Task<IReadOnlyList<MetricDefinitionV2>> GetMetricDefinitionsAsync(
string monitoringAccount,
IReadOnlyList<DimensionFilter> dimensionFilters,
DateTime startTimeUtc,
DateTime endTimeUtc,
Guid? traceId = null);
}
}<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="PublicationConfiguration.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Configuration
{
using Newtonsoft.Json;
/// <summary>
/// Determines which metric storage should be used for the data from this preaggregate.
/// </summary>
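/// <example>
/// A minimal sketch of selecting a publication target for a preaggregate; how the chosen configuration is
/// attached to a preaggregate is outside this excerpt.
/// <code>
/// // Publish to the metric store only.
/// var metricStoreOnly = PublicationConfiguration.MetricStore;
///
/// // Publish to the cache server only (hot path).
/// var cacheServerOnly = PublicationConfiguration.CacheServer;
/// </code>
/// </example>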
public sealed class PublicationConfiguration : IPublicationConfiguration
{
/// <summary>
/// Data store configuration where data is published to the metric store only.
/// </summary>
public static readonly PublicationConfiguration MetricStore = new PublicationConfiguration(true, true, false);
/// <summary>
/// Data store configuration where data is published to the cache server only.
/// </summary>
public static readonly PublicationConfiguration CacheServer = new PublicationConfiguration(false, false, false);
/// <summary>
/// Data store configuration where data is published to the cache server and the metric store.
/// </summary>
public static readonly PublicationConfiguration CacheServerAndRawMetricsStore = new PublicationConfiguration(true, false, false);
/// <summary>
/// Data store configuration where data is published to the cache server and to the metric store as an aggregated metric.
/// </summary>
public static readonly PublicationConfiguration CacheServerAndAggregatedMetricsStore = new PublicationConfiguration(true, false, true);
/// <summary>
/// Data store configuration where data is published to the metrics store as an aggregated metric only.
/// </summary>
public static readonly PublicationConfiguration AggregatedMetricsStore = new PublicationConfiguration(true, true, true);
/// <summary>
/// Initializes a new instance of the <see cref="PublicationConfiguration" /> class.
/// </summary>
/// <param name="metricStorePublicationEnabled">If metric store publication is enabled.</param>
/// <param name="cacheServerPublicationDisabled">IF cache server publication is disabled.</param>
/// <param name="aggregatedMetricsStorePublication">If aggregated metrics store publication is enabled.</param>
[JsonConstructor]
internal PublicationConfiguration(bool metricStorePublicationEnabled, bool cacheServerPublicationDisabled, bool aggregatedMetricsStorePublication)
{
this.MetricStorePublicationEnabled = metricStorePublicationEnabled;
this.CacheServerPublicationDisabled = cacheServerPublicationDisabled;
this.AggregatedMetricsStorePublication = aggregatedMetricsStorePublication;
}
/// <summary>
/// Gets a value indicating whether metric store publication is enabled.
/// </summary>
public bool MetricStorePublicationEnabled { get; }
/// <summary>
/// Gets a value indicating whether cache server publication is disabled.
/// </summary>
public bool CacheServerPublicationDisabled { get; }
/// <summary>
/// Gets a value indicating whether the preaggregate should be published as an aggregated metrics store metric.
/// </summary>
public bool AggregatedMetricsStorePublication { get; }
}
}<file_sep>// <copyright file="StampLocatorActivity.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
namespace Microsoft.Cloud.Metrics.Client.ThirdParty
{
/// <summary>
/// The stamp locator activities.
/// </summary>
public enum StampLocatorActivity
{
/// <summary>
/// Refreshing the region stamp map from the MDM backend API.
/// </summary>
StartToRefrehRegionStampMap,
/// <summary>
/// Finished refreshing the region stamp map from the MDM backend API.
/// </summary>
FinishedRefreshingRegionStampMap,
/// <summary>
/// Failed to refresh the region stamp map from the MDM backend API.
/// </summary>
FailedToRefrehRegionStampMap,
/// <summary>
/// Loading the region stamp map from the local file regionStampMap.json.
/// </summary>
StartToLoadRegionStampMapFromLocalFile,
/// <summary>
/// Finished loading the region stamp map from the local file regionStampMap.json.
/// </summary>
FinishedLoadingRegionStampMapFromLocalFile,
/// <summary>
/// Failed to load the region stamp map from the local file regionStampMap.json.
/// </summary>
FailedToLoadRegionStampMapFromLocalFile,
/// <summary>
/// Writing the region stamp map to the local file regionStampMap.json.
/// </summary>
StartToWriteRegionStampMapToLocalFile,
/// <summary>
/// Finished writing the region stamp map to the local file regionStampMap.json.
/// </summary>
FinishedWritingRegionStampMapToLocalFile,
/// <summary>
/// Failed to write the region stamp map to the local file regionStampMap.json.
/// </summary>
FailedToWriteRegionStampMapToLocalFile,
}
}
<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="FileNamePathHelper.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Utility
{
using System;
using System.Diagnostics.CodeAnalysis;
using System.IO;
using System.Security.Cryptography;
using System.Text;
/// <summary>
/// The helper class for file names and paths.
/// </summary>
internal static class FileNamePathHelper
{
/// <summary>
/// The default maximum file name allowed.
/// </summary>
internal const int MaximumFileNameAllowed = 256;
/// <summary>
/// The json file extension.
/// </summary>
internal const string JsonFileExtension = ".json";
/// <summary>
/// The javascript file extension.
/// </summary>
private const string JsFileExtension = ".js";
/// <summary>
/// The sorted invalid file chars.
/// </summary>
private static readonly char[] SortedInvalidFileChars;
/// <summary>
/// Initializes static members of the <see cref="FileNamePathHelper"/> class.
/// </summary>
static FileNamePathHelper()
{
SortedInvalidFileChars = Path.GetInvalidFileNameChars();
Array.Sort(SortedInvalidFileChars);
}
/// <summary>
/// Constructs a valid file name by joining the provided parameters with "_" while replacing invalid chars with "^".
/// </summary>
/// <param name="monitoringAccount">The monitoring account.</param>
/// <param name="metricNamespace">The metric namespace.</param>
/// <param name="metric">The metric.</param>
/// <param name="monitorId">The monitor identifier.</param>
/// <param name="fileExtension">The file extension.</param>
/// <param name="maximumFileNameAllowed">The maximum length of the file name that will be returned.</param>
/// <returns>
/// A valid file name.
/// </returns>
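/// <example>
/// A minimal sketch; for short inputs the parts are simply joined with "_" (invalid characters replaced
/// with "^"), while longer names are shortened and suffixed with a hash of the full name.
/// <code>
/// var fileName = FileNamePathHelper.ConstructValidFileName(
///     "MyAccount",
///     "My.Namespace",
///     "RequestCount",
///     "AvailabilityMonitor",
///     FileNamePathHelper.JsonFileExtension,
///     FileNamePathHelper.MaximumFileNameAllowed);
/// </code>
/// </example>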
[SuppressMessage("Microsoft.Security.Cryptography", "CA5354:SHA1CannotBeUsed", Justification = "Used for converting long string to short string, not for security or comparison")]
internal static string ConstructValidFileName(string monitoringAccount, string metricNamespace, string metric, string monitorId, string fileExtension, int maximumFileNameAllowed)
{
// To provide better back compat, only modify if it exceeds the max allowed in the first place.
var builder = new StringBuilder(maximumFileNameAllowed);
builder.Append(monitoringAccount);
if (!string.IsNullOrWhiteSpace(metricNamespace))
{
builder.Append('_').Append(metricNamespace);
}
if (!string.IsNullOrWhiteSpace(metric))
{
builder.Append('_').Append(metric);
}
if (!string.IsNullOrWhiteSpace(monitorId))
{
builder.Append('_').Append(monitorId);
}
var longFileNameString = builder.ToString();
var requiresHashing = builder.Length + fileExtension.Length > maximumFileNameAllowed;
if (requiresHashing)
{
builder.Clear();
const int hashValueLength = 16;
var totalNameLength = maximumFileNameAllowed - fileExtension.Length - hashValueLength - 1;
// Construct the allowed space for each part remaining (monitoring account always is included)
var providedParts = 1;
if (!string.IsNullOrWhiteSpace(metricNamespace))
{
++providedParts;
}
if (!string.IsNullOrWhiteSpace(metric))
{
++providedParts;
}
if (!string.IsNullOrWhiteSpace(monitorId))
{
++providedParts;
}
AppendShortedFilePart(builder, monitoringAccount, totalNameLength, ref providedParts);
AppendShortedFilePart(builder, metricNamespace, totalNameLength, ref providedParts);
AppendShortedFilePart(builder, metric, totalNameLength, ref providedParts);
AppendShortedFilePart(builder, monitorId, totalNameLength, ref providedParts);
}
ReplaceInvalidFileChars(builder);
if (!requiresHashing)
{
return builder.Append(fileExtension).ToString();
}
var shortFileNameString = builder.ToString();
builder.Clear();
using (var hashGenerator = SHA1.Create())
{
// Using only first 8 bytes of hash as 'good enough' in this case
var data = hashGenerator.ComputeHash(Encoding.UTF8.GetBytes(longFileNameString.ToLowerInvariant()));
for (var i = 0; i < data.Length && i < 8; ++i)
{
builder.Append(data[i].ToString("x2"));
}
}
return shortFileNameString + "_" + builder + fileExtension;
}
/// <summary>
/// Constructs a valid file name from the monitoring account name while replacing invalid chars with "^".
/// </summary>
/// <param name="monitoringAccount">The monitoring account.</param>
/// <param name="maximumFileNameAllowed">The maximum length of the file name that will be returned.</param>
/// <returns>
/// A valid file name.
/// </returns>
internal static string ConstructValidFileName(string monitoringAccount, int maximumFileNameAllowed)
{
return ConstructValidFileName(monitoringAccount, string.Empty, string.Empty, string.Empty, JsonFileExtension, maximumFileNameAllowed);
}
/// <summary>
/// Converts a path to a valid folder name.
/// </summary>
/// <param name="path">The path to be converted.</param>
/// <returns>A valid folder name.</returns>
internal static string ConvertPathToValidFolderName(string path)
{
var builder = new StringBuilder(path);
ReplaceInvalidFileChars(builder);
return builder.ToString();
}
/// <summary>
/// Appends the shorted file part to the final string.
/// </summary>
/// <param name="builder">The builder containing the final string.</param>
/// <param name="value">The value to add to the string.</param>
/// <param name="totalAllowedLength">Total length of the final string allowed.</param>
/// <param name="partsRemaining">The parts remaining that will be added to the final string.</param>
private static void AppendShortedFilePart(StringBuilder builder, string value, int totalAllowedLength, ref int partsRemaining)
{
if (string.IsNullOrWhiteSpace(value))
{
// Do not count this as a part since it was empty to begin with.
return;
}
var currentMaxAllowed = (totalAllowedLength - builder.Length) / partsRemaining;
--partsRemaining;
if (value.Length > currentMaxAllowed)
{
var partLengths = (currentMaxAllowed - 1) / 2;
builder.AppendFormat(
"{0}{1}~{2}",
builder.Length == 0 ? string.Empty : "_",
value.Substring(0, partLengths),
value.Substring(value.Length - partLengths));
}
else
{
builder.AppendFormat(
"{0}{1}",
builder.Length == 0 ? string.Empty : "_",
value);
}
}
/// <summary>
/// Replaces invalid file name characters in place.
/// </summary>
/// <param name="builder">The string builder to sanitize.</param>
private static void ReplaceInvalidFileChars(StringBuilder builder)
{
for (int i = 0; i < builder.Length; ++i)
{
if (!char.IsLetter(builder[i]) && !char.IsDigit(builder[i]))
{
if (Array.BinarySearch(SortedInvalidFileChars, builder[i]) >= 0)
{
builder[i] = '^';
}
}
}
}
}
}<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="IMetricConfigurationManager.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Configuration
{
using System.Collections.Generic;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
/// <summary>
/// The interface that manages metric configurations.
/// </summary>
public interface IMetricConfigurationManager
{
/// <summary>
/// Get the metric specified by the account, namespace and metric name.
/// </summary>
/// <param name="monitoringAccount">The monitoring account.</param>
/// <param name="metricNamespace">The metric namespace.</param>
/// <param name="metricName">The metric name.</param>
/// <returns>The metric.</returns>
Task<IMetricConfiguration> GetAsync(IMonitoringAccount monitoringAccount, string metricNamespace, string metricName);
/// <summary>
/// Gets the metric configurations under the specified account and namespace.
/// </summary>
/// <param name="monitoringAccount">The monitoring account.</param>
/// <param name="metricNamespace">The metric namespace.</param>
/// <param name="returnEmptyConfig">Determine if empty, unmodified configurations should be returned.</param>
/// <returns>The metrics that match the criteria.</returns>
Task<IReadOnlyList<IMetricConfiguration>> GetMultipleAsync(IMonitoringAccount monitoringAccount, string metricNamespace, bool returnEmptyConfig = false);
/// <summary>
/// Gets the metric configurations under the specified account.
/// </summary>
/// <param name="monitoringAccount">The monitoring account.</param>
/// <param name="returnEmptyConfig">Determine if empty, unmodified configurations should be returned.</param>
/// <returns>The metrics that match the criteria.</returns>
Task<IReadOnlyList<IMetricConfiguration>> GetMultipleAsync(IMonitoringAccount monitoringAccount, bool returnEmptyConfig = false);
/// <summary>
/// Save the metric configuration provided.
/// </summary>
/// <param name="monitoringAccount">The monitoring account configuration.</param>
/// <param name="metricConfiguration">The metric to be saved.</param>
/// <param name="skipVersionCheck">Flag indicating whether or not the version flag should be honored.</param>
/// <returns>A task the caller can wait on.</returns>
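/// <example>
/// A minimal sketch of a get-modify-save round trip; the members used to modify the configuration are
/// not shown in this excerpt, and the account object is assumed to be obtained elsewhere.
/// <code>
/// static async Task UpdateMetricConfigurationAsync(IMetricConfigurationManager manager, IMonitoringAccount account, string metricNamespace, string metricName)
/// {
///     var configuration = await manager.GetAsync(account, metricNamespace, metricName);
///
///     // ... modify 'configuration' here ...
///
///     await manager.SaveAsync(account, configuration);
/// }
/// </code>
/// </example>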
Task SaveAsync(IMonitoringAccount monitoringAccount, IMetricConfiguration metricConfiguration, bool skipVersionCheck = false);
/// <summary>
/// Deletes the metric configuration by metric name.
/// </summary>
/// <param name="monitoringAccount">The monitoring account.</param>
/// <param name="metricNamespace">The metric namespace.</param>
/// <param name="metricName">Name of the metric.</param>
/// <returns>A task the caller can wait on.</returns>
Task DeleteAsync(IMonitoringAccount monitoringAccount, string metricNamespace, string metricName);
/// <summary>
/// Sync all metric configuration across mirror accounts.
/// </summary>
/// <param name="monitoringAccount">Monitoring account.</param>
/// <param name="skipVersionCheck">True if skip version check.</param>
/// <returns>A list of configuration update result.</returns>
Task<IReadOnlyList<IConfigurationUpdateResult>> SyncAllAsync(
IMonitoringAccount monitoringAccount,
bool skipVersionCheck = false);
/// <summary>
/// Sync all metric configuration across mirror accounts.
/// </summary>
/// <param name="monitoringAccount">Monitoring account.</param>
/// <param name="metricNamespace">Metric namespace.</param>
/// <param name="skipVersionCheck">True if skip version check.</param>
/// <returns>A list of configuration update result.</returns>
Task<IReadOnlyList<IConfigurationUpdateResult>> SyncAllAsync(
IMonitoringAccount monitoringAccount,
string metricNamespace,
bool skipVersionCheck = false);
/// <summary>
/// Sync all metric and monitor configuration across mirror accounts.
/// </summary>
/// <param name="monitoringAccount">Monitoring account.</param>
/// <param name="skipVersionCheck">True if skip version check.</param>
/// <returns>A list of <see cref="ConfigurationUpdateResultList"/>.</returns>
Task<IReadOnlyList<ConfigurationUpdateResultList>> SyncAllAsyncV2(
IMonitoringAccount monitoringAccount,
bool skipVersionCheck = false);
/// <summary>
/// Sync all metric and monitor configuration across mirror accounts.
/// </summary>
/// <param name="monitoringAccount">Monitoring account.</param>
/// <param name="metricNamespace">Metric namespace.</param>
/// <param name="skipVersionCheck">True if skip version check.</param>
/// <returns>A list of <see cref="ConfigurationUpdateResultList"/>.</returns>
Task<IReadOnlyList<ConfigurationUpdateResultList>> SyncAllAsyncV2(
IMonitoringAccount monitoringAccount,
string metricNamespace,
bool skipVersionCheck = false);
/// <summary>
/// Sync metric and monitor configuration for given metric name across mirror accounts.
/// </summary>
/// <param name="monitoringAccount">Monitoring account.</param>
/// <param name="metricNamespace">Metric namespace.</param>
/// <param name="metricName">Metric name.</param>
/// <param name="skipVersionCheck">True if skip version check.</param>
/// <returns>An instance of <see cref="ConfigurationUpdateResultList"/>.</returns>
Task<ConfigurationUpdateResultList> SyncConfigurationAsync(
IMonitoringAccount monitoringAccount,
string metricNamespace,
string metricName,
bool skipVersionCheck = false);
/// <summary>
/// Download metric configurations as json files.
/// </summary>
/// <param name="destinationFolder">folder for storing downloaded config json files.</param>
/// <param name="monitoringAccount">Monitoring account</param>
/// <param name="metricNamespace">metric namespace</param>
/// <param name="metricName">metric name</param>
/// <param name="metricNameRegex">metric name regex.</param>
/// <param name="foldersOnNamespacesLevel">indicate if true, config files will be stored under corresponding namespaces folders.</param>
/// <param name="downloadDefaultMetricConfig">indicate if true, default comfigs will also be downloaded.</param>
/// <param name="maxFileNameProducedLength">max size of file name that will be created locally.</param>
/// <returns> OperationStatus.</returns>
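/// <example>
/// A minimal sketch of downloading every configuration in one namespace into per-namespace folders,
/// assuming <c>manager</c> and <c>account</c> are obtained elsewhere; the namespace and destination
/// path are illustrative only.
/// <code>
/// static async Task DownloadNamespaceAsync(IMetricConfigurationManager manager, IMonitoringAccount account)
/// {
///     var status = await manager.DownloadMetricConfigurationAsync(
///         @"C:\MetricConfigs",
///         account,
///         metricNamespace: "My.Namespace",
///         foldersOnNamespacesLevel: true);
///
///     // Inspect 'status' for success or failure (its members are not shown in this excerpt).
/// }
/// </code>
/// </example>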
Task<OperationStatus> DownloadMetricConfigurationAsync(
string destinationFolder,
IMonitoringAccount monitoringAccount,
string metricNamespace = null,
string metricName = null,
Regex metricNameRegex = null,
bool foldersOnNamespacesLevel = false,
bool downloadDefaultMetricConfig = false,
int maxFileNameProducedLength = 256);
/// <summary>
/// Modify local metric configuration json files by replacing AccountName.
/// </summary>
/// <param name="sourceFolder">folder in which config files locate.</param>
/// <param name="monitoringAccount">Monitoring account</param>
/// <param name="replaceAccountNameWith">account name to replace original account with in local config files.</param>
/// <param name="metricNameRegex">metric name regex.</param>
/// <returns> OperationStatus.</returns>
Task<OperationStatus> ReplaceAccountNameInMetricConfigurationFilesAsync(
string sourceFolder,
IMonitoringAccount monitoringAccount,
string replaceAccountNameWith,
Regex metricNameRegex = null);
/// <summary>
/// Modify local metric configuration json files by replacing Namespace.
/// </summary>
/// <param name="sourceFolder">folder in which config files locate.</param>
/// <param name="monitoringAccount">Monitoring account</param>
/// <param name="replaceNamespaceWith">namespace to replace original namespace with in local config files.</param>
/// <param name="metricNameRegex">metric name regex.</param>
/// <returns> OperationStatus.</returns>
Task<OperationStatus> ReplaceNamespaceInMetricConfigurationFilesAsync(
string sourceFolder,
IMonitoringAccount monitoringAccount,
string replaceNamespaceWith,
Regex metricNameRegex = null);
/// <summary>
/// Upload all metric configurations from the given folder to the same monitoring account.
/// </summary>
/// <param name="sourceFolder">folder in which config files locate.</param>
/// <param name="monitoringAccount">Monitoring account</param>
/// <param name="force">indicate if true, skip version check for metric config against existing metric config version.</param>
/// <returns> OperationStatus.</returns>
Task<OperationStatus> UploadMetricConfigurationAsync(
string sourceFolder,
IMonitoringAccount monitoringAccount,
bool force = false);
/// <summary>
/// Apply metric configuration from local template file to many metrics under one monitoring account.
/// </summary>
/// <param name="templateFilePath">absolute path of template metric configuration json file.</param>
/// <param name="monitoringAccount">Monitoring account</param>
/// <param name="metricNamespace">metric namespace</param>
/// <param name="metricName">metric name</param>
/// <param name="metricNameRegex">metric name regex</param>
/// <param name="force">indicate if true, will overwrite existing config on server and skip version check.</param>
/// <param name="whatIf">indicate if true, show the template config without actually uploading the config</param>
/// <returns> OperationStatus.</returns>
Task<OperationStatus> ApplyTemplateMetricConfigurationAsync(
string templateFilePath,
IMonitoringAccount monitoringAccount,
string metricNamespace = null,
string metricName = null,
Regex metricNameRegex = null,
bool force = false,
bool whatIf = false);
/// <summary>
/// Synchronizes all metrics configurations only asynchronously across mirror accounts.
/// </summary>
/// <param name="monitoringAccount">The monitoring account.</param>
/// <param name="metricNamespace">The metric namespace.</param>
/// <param name="skipVersionCheck">if set to <c>true</c> [skip version check].</param>
/// <returns>A list of <see cref="ConfigurationUpdateResultList"/>.</returns>
Task<IReadOnlyList<ConfigurationUpdateResultList>> SyncAllMetricsAsync(
IMonitoringAccount monitoringAccount,
string metricNamespace = null,
bool skipVersionCheck = false);
/// <summary>
/// Synchronizes the metric configuration only across mirror accounts asynchronously.
/// </summary>
/// <param name="monitoringAccount">The monitoring account.</param>
/// <param name="metricNamespace">The metric namespace.</param>
/// <param name="metricName">Name of the metric.</param>
/// <param name="skipVersionCheck">if set to <c>true</c> [skip version check].</param>
/// <returns>An instance of <see cref="ConfigurationUpdateResultList"/>.</returns>
Task<ConfigurationUpdateResultList> SyncMetricConfigurationAsync(
IMonitoringAccount monitoringAccount,
string metricNamespace,
string metricName,
bool skipVersionCheck = false);
}
}<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="MonitorReader.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Monitors
{
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net.Http;
using System.Threading.Tasks;
using Microsoft.Cloud.Metrics.Client.Metrics;
using Microsoft.Cloud.Metrics.Client.Utility;
using Microsoft.Online.Metrics.Serialization;
using Microsoft.Online.Metrics.Serialization.Configuration;
using Microsoft.Online.Metrics.Serialization.Monitor;
using Newtonsoft.Json;
/// <summary>
/// The class to query monitor health status.
/// </summary>
public sealed class MonitorReader : IMonitorReader
{
/// <summary>
/// The number of attempts.
/// </summary>
public const int NumAttempts = 3;
#pragma warning disable SA1401 // Fields must be private
/// <summary>
/// The relative URL for health controller
/// </summary>
public readonly string HealthRelativeUrl;
/// <summary>
/// The V2 relative URL for metrics configuration
/// </summary>
public readonly string ConfigRelativeUrlV2;
#pragma warning restore SA1401 // Fields must be private
/// <summary>
/// The status sampling type.
/// </summary>
private static readonly SamplingType StatusSamplingType = new SamplingType("Status");
/// <summary>
/// The HTTP client instance.
/// </summary>
private readonly HttpClient httpClient;
/// <summary>
/// The connection information.
/// </summary>
private readonly ConnectionInfo connectionInfo;
/// <summary>
/// Initializes a new instance of the <see cref="MonitorReader"/> class.
/// </summary>
/// <param name="connectionInfo">The connection information.</param>
public MonitorReader(ConnectionInfo connectionInfo)
{
if (connectionInfo == null)
{
throw new ArgumentNullException(nameof(connectionInfo));
}
this.connectionInfo = connectionInfo;
this.HealthRelativeUrl = this.connectionInfo.GetAuthRelativeUrl(MetricsServerRelativeUris.HealthRelativeUrl);
this.ConfigRelativeUrlV2 = this.connectionInfo.GetAuthRelativeUrl(MetricsServerRelativeUris.ConfigRelativeUrlV2);
this.httpClient = HttpClientHelper.CreateHttpClientWithAuthInfo(connectionInfo);
this.GetResponseAsStringDelegate = HttpClientHelper.GetResponse;
}
/// <summary>
/// Gets or sets the delegate to get web response as string.
/// </summary>
/// <remarks>For unit testing.</remarks>
internal Func<Uri, HttpMethod, HttpClient, string, string, object, string, string, Guid?, byte, int, Task<Tuple<string, HttpResponseMessage>>> GetResponseAsStringDelegate { get; set; }
/// <summary>
/// Gets the monitors for the given <paramref name="metricIdentifier"/>.
/// </summary>
/// <param name="metricIdentifier">The metric identifier.</param>
/// <returns>The monitors for the given <paramref name="metricIdentifier"/>.</returns>
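/// <example>
/// A minimal sketch of listing the monitor IDs configured for a metric; the reader and metric identifier
/// are assumed to be constructed elsewhere.
/// <code>
/// static async Task ListMonitorsAsync(MonitorReader reader, MetricIdentifier metricId)
/// {
///     foreach (var monitor in await reader.GetMonitorsAsync(metricId))
///     {
///         Console.WriteLine(monitor.MonitorId);
///     }
/// }
/// </code>
/// </example>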
public async Task<IReadOnlyList<MonitorIdentifier>> GetMonitorsAsync(MetricIdentifier metricIdentifier)
{
metricIdentifier.Validate();
string url = string.Format(
"{0}{1}/monitoringAccount/{2}/metricNamespace/{3}/metric/{4}/monitorIDs",
this.connectionInfo.GetEndpoint(metricIdentifier.MonitoringAccount),
this.ConfigRelativeUrlV2,
metricIdentifier.MonitoringAccount,
SpecialCharsHelper.EscapeTwice(metricIdentifier.MetricNamespace),
SpecialCharsHelper.EscapeTwice(metricIdentifier.MetricName));
var response = await this.GetResponseAsStringDelegate(
new Uri(url),
HttpMethod.Get,
this.httpClient,
metricIdentifier.MonitoringAccount,
this.ConfigRelativeUrlV2,
null,
string.Empty,
null,
null,
MetricQueryResponseDeserializer.CurrentVersion,
NumAttempts).ConfigureAwait(false);
return JsonConvert.DeserializeObject<MonitorIdentifier[]>(response.Item1);
}
/// <summary>
/// Gets the monitor IDs for the given monitoring account, optionally with the metric namespace.
/// </summary>
/// <param name="monitoringAccount">The monitoring account.</param>
/// <param name="metricNamespace">The metric namespace.</param>
/// <returns>
/// The monitor IDs for the given monitoring account, optionally with the metric namespace.
/// </returns>
public async Task<IReadOnlyList<MonitorIdentifier>> GetMonitorsAsync(string monitoringAccount, string metricNamespace = null)
{
if (string.IsNullOrWhiteSpace(monitoringAccount))
{
throw new ArgumentException("monitoringAccount is null or empty.", monitoringAccount);
}
string namespaceSegments = string.Empty;
if (!string.IsNullOrWhiteSpace(metricNamespace))
{
namespaceSegments = string.Format("metricNamespace/{0}", SpecialCharsHelper.EscapeTwice(metricNamespace));
}
string url = string.Format(
"{0}{1}/monitoringAccount/{2}/{3}/monitorIDs",
this.connectionInfo.GetEndpoint(monitoringAccount),
this.ConfigRelativeUrlV2,
monitoringAccount,
namespaceSegments);
var response = await this.GetResponseAsStringDelegate(
new Uri(url),
HttpMethod.Get,
this.httpClient,
monitoringAccount,
this.ConfigRelativeUrlV2,
null,
string.Empty,
null,
null,
MetricQueryResponseDeserializer.CurrentVersion,
NumAttempts).ConfigureAwait(false);
return JsonConvert.DeserializeObject<MonitorIdentifier[]>(response.Item1);
}
/// <summary>
/// Gets the current health status asynchronously.
/// Deprecated due to the misspelling ("Heath" instead of "Health"); it exists only for backward compatibility.
/// </summary>
/// <param name="monitorInstanceDefinition">The monitor instance definition.</param>
/// <returns>
/// Monitor health status.
/// </returns>
public async Task<IMonitorHealthStatus> GetCurrentHeathStatusAsync(TimeSeriesDefinition<MonitorIdentifier> monitorInstanceDefinition)
{
return await this.GetCurrentHealthStatusAsync(monitorInstanceDefinition).ConfigureAwait(false);
}
/// <summary>
/// Gets the current monitor health status.
/// </summary>
/// <param name="monitorInstanceDefinition">The monitor instance definition.</param>
/// <returns>
/// The current monitor health status.
/// </returns>
public async Task<IMonitorHealthStatus> GetCurrentHealthStatusAsync(TimeSeriesDefinition<MonitorIdentifier> monitorInstanceDefinition)
{
if (monitorInstanceDefinition == null)
{
throw new ArgumentNullException(nameof(monitorInstanceDefinition));
}
var statuses = await this.GetMultipleCurrentHeathStatusesAsync(monitorInstanceDefinition).ConfigureAwait(false);
return statuses.First().Value;
}
/// <summary>
/// Batched API to get the current monitor health statuses.
/// </summary>
/// <param name="monitorInstanceDefinitions">The monitor instance definitions.</param>
/// <returns>
/// The current monitor health statuses.
/// </returns>
public Task<IReadOnlyList<KeyValuePair<TimeSeriesDefinition<MonitorIdentifier>, IMonitorHealthStatus>>> GetMultipleCurrentHeathStatusesAsync(
params TimeSeriesDefinition<MonitorIdentifier>[] monitorInstanceDefinitions)
{
return this.GetMultipleCurrentHeathStatusesAsync(monitorInstanceDefinitions.AsEnumerable());
}
/// <summary>
/// Batched API to get the current monitor health statuses.
/// </summary>
/// <param name="monitorInstanceDefinitions">The monitor instance definitions.</param>
/// <returns>
/// The current monitor health statuses.
/// </returns>
public async Task<IReadOnlyList<KeyValuePair<TimeSeriesDefinition<MonitorIdentifier>, IMonitorHealthStatus>>> GetMultipleCurrentHeathStatusesAsync(
IEnumerable<TimeSeriesDefinition<MonitorIdentifier>> monitorInstanceDefinitions)
{
if (monitorInstanceDefinitions == null)
{
throw new ArgumentNullException("monitorInstanceDefinitions");
}
var definitionList = monitorInstanceDefinitions.ToList();
if (definitionList.Count == 0)
{
throw new ArgumentException("The count of 'monitorInstanceDefinitions' is 0.");
}
if (definitionList.Any(d => d == null))
{
throw new ArgumentException("At least one of monitorInstanceDefinitions are null.");
}
var monitorIdentifier = definitionList[0].Id;
var dimensionCombinationList = new List<Dictionary<string, string>>(definitionList.Count);
foreach (var definition in definitionList)
{
if (!definition.Id.Equals(monitorIdentifier))
{
throw new MetricsClientException("All the time series definitions must have the same monitor identifier.");
}
var dict = new Dictionary<string, string>();
if (definition.DimensionCombination != null)
{
foreach (var kvp in definition.DimensionCombination)
{
dict[kvp.Key] = kvp.Value;
}
}
dimensionCombinationList.Add(dict);
}
string operation = $"{this.HealthRelativeUrl}/batchedRead";
string url = string.Format(
"{0}{1}/monitoringAccount/{2}/metricNamespace/{3}/metric/{4}/monitorId/{5}",
this.connectionInfo.GetEndpoint(monitorIdentifier.MetricIdentifier.MonitoringAccount),
operation,
monitorIdentifier.MetricIdentifier.MonitoringAccount,
SpecialCharsHelper.EscapeTwice(monitorIdentifier.MetricIdentifier.MetricNamespace),
SpecialCharsHelper.EscapeTwice(monitorIdentifier.MetricIdentifier.MetricName),
SpecialCharsHelper.EscapeTwice(monitorIdentifier.MonitorId));
var response = await this.GetResponseAsStringDelegate(
new Uri(url),
HttpMethod.Post,
this.httpClient,
monitorIdentifier.MetricIdentifier.MonitoringAccount,
operation,
dimensionCombinationList,
string.Empty,
null,
null,
MetricQueryResponseDeserializer.CurrentVersion,
NumAttempts).ConfigureAwait(false);
var deserializeObject = JsonConvert.DeserializeObject<List<MonitorHealthStatus>>(response.Item1);
var results = new KeyValuePair<TimeSeriesDefinition<MonitorIdentifier>, IMonitorHealthStatus>[deserializeObject.Count];
for (int i = 0; i < deserializeObject.Count; ++i)
{
var status = new KeyValuePair<TimeSeriesDefinition<MonitorIdentifier>, IMonitorHealthStatus>(definitionList[i], deserializeObject[i]);
results[i] = status;
}
return results;
}
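// Illustrative usage sketch (added comment, not part of the original source). All definitions passed to
// the batched API must share the same monitor identifier (enforced above); only their dimension
// combinations may differ. The definitions `cpuOnMachine1` and `cpuOnMachine2` are hypothetical.
//
//   var statuses = await monitorReader.GetMultipleCurrentHeathStatusesAsync(cpuOnMachine1, cpuOnMachine2).ConfigureAwait(false);
//   foreach (var pair in statuses)
//   {
//       Console.WriteLine($"{pair.Key}: healthy = {pair.Value.Healthy}");
//   }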
/// <summary>
/// Gets the monitor history.
/// </summary>
/// <param name="startTimeUtc">The start time UTC.</param>
/// <param name="endTimeUtc">The end time UTC.</param>
/// <param name="monitorInstanceDefinition">The monitor instance definition.</param>
/// <returns>
/// The monitor health history for each minute in the provided time range.
/// true means healthy, false means unhealthy, and null means that the monitor didn't report a health status for that minute.
/// </returns>
[Obsolete("We are going to retire this. Please use GetBatchWatchdogHealthHistory in Health SDK instead.")]
public async Task<TimeSeries<MonitorIdentifier, bool?>> GetMonitorHistoryAsync(
DateTime startTimeUtc,
DateTime endTimeUtc,
TimeSeriesDefinition<MonitorIdentifier> monitorInstanceDefinition)
{
if (monitorInstanceDefinition == null)
{
throw new ArgumentNullException("monitorInstanceDefinition");
}
if (startTimeUtc > endTimeUtc)
{
throw new ArgumentException(string.Format("startTimeUtc [{0}] must be <= endTimeUtc [{1}]", startTimeUtc, endTimeUtc));
}
startTimeUtc = new DateTime(startTimeUtc.Ticks / TimeSpan.TicksPerMinute * TimeSpan.TicksPerMinute);
endTimeUtc = new DateTime(endTimeUtc.Ticks / TimeSpan.TicksPerMinute * TimeSpan.TicksPerMinute);
string dimensionsFlattened = null;
if (monitorInstanceDefinition.DimensionCombination != null)
{
dimensionsFlattened = string.Join(
"/",
monitorInstanceDefinition.DimensionCombination.Select(
d => string.Join("/", SpecialCharsHelper.EscapeTwice(d.Key), SpecialCharsHelper.EscapeTwice(d.Value))));
}
string operation = $"{this.HealthRelativeUrl}/history";
string url = string.Format(
"{0}{1}/monitoringAccount/{2}/metricNamespace/{3}/metric/{4}/monitorId/{5}/from/{6}/to/{7}{8}",
this.connectionInfo.GetEndpoint(monitorInstanceDefinition.Id.MetricIdentifier.MonitoringAccount),
operation,
monitorInstanceDefinition.Id.MetricIdentifier.MonitoringAccount,
SpecialCharsHelper.EscapeTwice(monitorInstanceDefinition.Id.MetricIdentifier.MetricNamespace),
SpecialCharsHelper.EscapeTwice(monitorInstanceDefinition.Id.MetricIdentifier.MetricName),
SpecialCharsHelper.EscapeTwice(monitorInstanceDefinition.Id.MonitorId),
UnixEpochHelper.GetMillis(startTimeUtc),
UnixEpochHelper.GetMillis(endTimeUtc),
dimensionsFlattened != null ? "/" + dimensionsFlattened : string.Empty);
var response = await this.GetResponseAsStringDelegate(
new Uri(url),
HttpMethod.Get,
this.httpClient,
monitorInstanceDefinition.Id.MetricIdentifier.MonitoringAccount,
operation,
null,
string.Empty,
null,
null,
MetricQueryResponseDeserializer.CurrentVersion,
NumAttempts).ConfigureAwait(false);
var values = JsonConvert.DeserializeObject<List<bool?>>(response.Item1);
return new TimeSeries<MonitorIdentifier, bool?>(startTimeUtc, endTimeUtc, SerializationConstants.DefaultSeriesResolutionInMinutes, monitorInstanceDefinition, new List<List<bool?>> { values }, TimeSeriesErrorCode.Success);
}
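// Illustrative usage sketch (added comment, not part of the original source) for this obsolete API.
// The supplied times are rounded down to minute boundaries and one bool? is returned per minute:
// true = healthy, false = unhealthy, null = no health report for that minute.
//
//   #pragma warning disable CS0618 // the method is marked [Obsolete]
//   var history = await monitorReader.GetMonitorHistoryAsync(
//       DateTime.UtcNow.AddHours(-1), DateTime.UtcNow, definition).ConfigureAwait(false);
//   #pragma warning restore CS0618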
}
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="IPermissionV2.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Configuration
{
using Newtonsoft.Json;
/// <summary>
/// This object represents one entity and their level of access to an MDM Account.
/// </summary>
public interface IPermissionV2
{
/// <summary>
/// The identity to grant permission.
/// </summary>
string Identity { get; }
/// <summary>
/// Gets the description of the permission.
/// </summary>
/// <remarks>Description is always null for <see cref="SecurityGroupV2"/> and <see cref="UserPermissionV2"/> class.</remarks>
[JsonProperty(DefaultValueHandling = DefaultValueHandling.Ignore)]
string Description { get; }
/// <summary>
/// The level of access to be granted to this identity.
/// </summary>
RoleConfiguration RoleConfiguration { get; set; }
}
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="IMonitorReader.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Monitors
{
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using Microsoft.Online.Metrics.Serialization.Configuration;
using Microsoft.Online.Metrics.Serialization.Monitor;
/// <summary>
/// The interface to query monitor health status.
/// </summary>
public interface IMonitorReader
{
/// <summary>
/// Gets the monitors for the given <paramref name="metricIdentifier"/>.
/// </summary>
/// <param name="metricIdentifier">The metric identifier.</param>
/// <returns>The monitors for the given <paramref name="metricIdentifier"/>.</returns>
Task<IReadOnlyList<MonitorIdentifier>> GetMonitorsAsync(MetricIdentifier metricIdentifier);
/// <summary>
/// Gets the monitor IDs for the given monitoring account, optionally with the metric namespace.
/// </summary>
/// <param name="monitoringAccount">The monitoring account.</param>
/// <param name="metricNamespace">The metric namespace.</param>
/// <returns>
/// The monitor IDs for the given monitoring account, optionally with the metric namespace.
/// </returns>
Task<IReadOnlyList<MonitorIdentifier>> GetMonitorsAsync(string monitoringAccount, string metricNamespace = null);
/// <summary>
/// Gets the current health status asynchronously.
/// </summary>
/// <param name="monitorInstanceDefinition">The monitor instance definition.</param>
/// <returns>Monitor health status.</returns>
[System.Obsolete("Deprecated, please use GetCurrentHealthStatusAsync (notice fixed spelling) instead.")]
Task<IMonitorHealthStatus> GetCurrentHeathStatusAsync(TimeSeriesDefinition<MonitorIdentifier> monitorInstanceDefinition);
/// <summary>
/// Gets the current monitor health status.
/// </summary>
/// <param name="monitorInstanceDefinition">The monitor instance definition.</param>
/// <returns>
/// The current monitor health status.
/// </returns>
Task<IMonitorHealthStatus> GetCurrentHealthStatusAsync(TimeSeriesDefinition<MonitorIdentifier> monitorInstanceDefinition);
/// <summary>
/// Batched API to get the current monitor health statuses.
/// </summary>
/// <param name="monitorInstanceDefinitions">The monitor instance definitions.</param>
/// <returns>
/// The current monitor health statuses.
/// </returns>
Task<IReadOnlyList<KeyValuePair<TimeSeriesDefinition<MonitorIdentifier>, IMonitorHealthStatus>>> GetMultipleCurrentHeathStatusesAsync(
params TimeSeriesDefinition<MonitorIdentifier>[] monitorInstanceDefinitions);
/// <summary>
/// Batched API to get the current monitor health statuses.
/// </summary>
/// <param name="monitorInstanceDefinitions">The monitor instance definitions.</param>
/// <returns>
/// The current monitor health statuses.
/// </returns>
Task<IReadOnlyList<KeyValuePair<TimeSeriesDefinition<MonitorIdentifier>, IMonitorHealthStatus>>> GetMultipleCurrentHeathStatusesAsync(
IEnumerable<TimeSeriesDefinition<MonitorIdentifier>> monitorInstanceDefinitions);
/// <summary>
/// Gets the monitor history.
/// </summary>
/// <param name="startTimeUtc">The start time UTC.</param>
/// <param name="endTimeUtc">The end time UTC.</param>
/// <param name="monitorInstanceDefinition">The monitor instance definition.</param>
/// <returns>
/// The monitor health history for each minute in the provided time range.
/// true means healthy, false means unhealthy, and null means that the monitor didn't report a health status for that minute.
/// </returns>
[Obsolete("We are going to retire this. Please use GetBatchWatchdogHealthHistory in Health SDK instead.")]
Task<TimeSeries<MonitorIdentifier, bool?>> GetMonitorHistoryAsync(
DateTime startTimeUtc,
DateTime endTimeUtc,
TimeSeriesDefinition<MonitorIdentifier> monitorInstanceDefinition);
}
}<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="FilteredTimeSeries.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Query
{
using System.Collections.Generic;
using System.Text;
using Metrics;
using Microsoft.Online.Metrics.Serialization.Configuration;
/// <summary>
/// Represents a single time series that was selected and is being returned as an item in <see cref="FilteredTimeSeriesQueryResponse"/>.
/// </summary>
public sealed class FilteredTimeSeries : IFilteredTimeSeries, IQueryResultV3
{
/// <summary>
/// Initializes a new instance of the <see cref="FilteredTimeSeries" /> class.
/// Create a single time series.
/// </summary>
/// <param name="metricIdentifier">The metric identifier.</param>
/// <param name="dimensionList">The list of dimensions.</param>
/// <param name="evaluatedResult">The evaluated result.</param>
/// <param name="seriesValues">The time series values, only included if specified in the query.</param>
internal FilteredTimeSeries(
MetricIdentifier metricIdentifier,
IReadOnlyList<KeyValuePair<string, string>> dimensionList,
double evaluatedResult,
IReadOnlyList<KeyValuePair<SamplingType, double[]>> seriesValues)
{
this.MetricIdentifier = metricIdentifier;
this.DimensionList = dimensionList;
this.EvaluatedResult = evaluatedResult;
this.TimeSeriesValues = seriesValues;
}
/// <summary>
/// Gets the metric identifier.
/// </summary>
public MetricIdentifier MetricIdentifier { get; }
/// <summary>
/// Set of valid dimension name-value pairs that meet the query condition.
/// </summary>
public IReadOnlyList<KeyValuePair<string, string>> DimensionList { get; }
/// <summary>
/// Gets the evaluated value for this time series that meets the condition set in the query (provided for evidence and/or sorting).
/// </summary>
public double EvaluatedResult { get; }
/// <summary>
/// Gets the full collection of time series values for the query interval. It should be null if
/// the query did not request the full collection of values to be returned.
/// </summary>
public IReadOnlyList<KeyValuePair<SamplingType, double[]>> TimeSeriesValues { get; }
/// <summary>
/// Gets the time series values for the requested sampling type.
/// </summary>
/// <param name="samplingType">The sampling type requested.</param>
/// <returns>The array of datapoints for the requested sampling type.</returns>
/// <exception cref="System.Collections.Generic.KeyNotFoundException">Thrown if the sampling type was not included in the response.</exception>
/// <remarks>
/// double.NaN is the sentinel used to indicate there is no metric value.
/// </remarks>
public double[] GetTimeSeriesValues(SamplingType samplingType)
{
for (var i = 0; i < this.TimeSeriesValues.Count; ++i)
{
if (samplingType.Equals(this.TimeSeriesValues[i].Key))
{
return this.TimeSeriesValues[i].Value;
}
}
throw new KeyNotFoundException($"Sampling type {samplingType} not found in the query result.");
}
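// Illustrative usage sketch (added comment, not part of the original source). Assumes `series` is a
// FilteredTimeSeries returned by a query that requested the Sum sampling type; SamplingType.Sum is
// assumed to be one of the predefined sampling types. double.NaN marks minutes without a value.
//
//   double[] sums = series.GetTimeSeriesValues(SamplingType.Sum);
//   double total = sums.Where(v => !double.IsNaN(v)).Sum();   // requires System.Linq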
/// <summary>
/// Returns a <see cref="string" /> that represents this instance.
/// </summary>
/// <returns>
/// A <see cref="string" /> that represents this instance.
/// </returns>
public override string ToString()
{
var sb = new StringBuilder();
sb.AppendLine($"EvaluatedResult: {this.EvaluatedResult}");
sb.Append("Dimensions:");
foreach (var pair in this.DimensionList)
{
sb.Append($"{pair.Key}: {pair.Value};");
}
sb.AppendLine();
if (this.TimeSeriesValues != null && this.TimeSeriesValues.Count > 0)
{
sb.Append("[");
for (int i = 0; i < this.TimeSeriesValues.Count; i++)
{
sb.Append("[");
sb.Append($"{this.TimeSeriesValues[i].Key}, ");
sb.Append(string.Join(", ", this.TimeSeriesValues[i].Value));
sb.AppendLine("]");
}
sb.AppendLine("]");
}
return sb.ToString();
}
}
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="MonitoringAccountAcls.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client
{
using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Threading.Tasks;
using Newtonsoft.Json;
using Utility;
/// <summary>
/// Represents a list of ACLs associated with a monitoring account.
/// </summary>
/// <remarks>
/// This does not include AP PKI as the cert is automatically generated and the ACL itself is not needed client side.
/// </remarks>
internal sealed class MonitoringAccountAcls : IMonitoringAccountAcls
{
/// <summary>
/// Gets or sets the thumbprints.
/// </summary>
[JsonProperty(PropertyName = "tps")]
public List<string> Thumbprints { get; set; }
/// <summary>
/// Gets or sets the dSMS acls.
/// </summary>
[JsonProperty(PropertyName = "dacls")]
public List<string> DsmsAcls { get; set; }
/// <summary>
/// Gets or sets the KeyVault acls.
/// </summary>
[JsonProperty(PropertyName = "kvacls")]
public List<string> KeyVaultAcls { get; set; }
/// <summary>
/// Gets the ACLs defined for the specified monitoring account.
/// </summary>
/// <param name="accountName">Name of the account.</param>
/// <param name="targetStampEndpoint">If not a production account, allows the target stamp to be overridden. In most cases, allowing to default is appropriate.</param>
/// <param name="includeReadOnly">True if the set should include those with read only access, otherwise false to return those with higher rights.</param>
/// <returns>Acls for monitoring account.</returns>
/// <exception cref="System.ArgumentNullException">accountName</exception>
public static async Task<IMonitoringAccountAcls> GetAcls(string accountName, string targetStampEndpoint = "https://global.metrics.nsatc.net", bool includeReadOnly = true)
{
if (string.IsNullOrWhiteSpace(accountName))
{
throw new ArgumentNullException(nameof(accountName));
}
var client = HttpClientHelper.CreateHttpClient(TimeSpan.FromMinutes(1));
var requestUri = $"{targetStampEndpoint}/public/monitoringAccount/{accountName}/acls?includeReadOnly={includeReadOnly}";
var result = await HttpClientHelper.GetResponse(new Uri(requestUri), HttpMethod.Get, client, null, null).ConfigureAwait(false);
return JsonConvert.DeserializeObject<MonitoringAccountAcls>(result.Item1);
}
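// Illustrative usage sketch (added comment, not part of the original source). The class is internal, so
// the call is only reachable from within this assembly; the Thumbprints property is assumed to be
// surfaced on IMonitoringAccountAcls as it is on this class.
//
//   IMonitoringAccountAcls acls = await MonitoringAccountAcls.GetAcls("MyAccount").ConfigureAwait(false);
//   Console.WriteLine(string.Join(", ", acls.Thumbprints ?? new List<string>()));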
}
}
<file_sep>// -----------------------------------------------------------------------
// <copyright file="IMetricBuilder.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// -----------------------------------------------------------------------
namespace Microsoft.Online.Metrics.Serialization
{
using System;
using System.Collections.Generic;
using System.IO;
/// <summary>
/// The interface used by deserializer to abstract the way how deserialized objects are created.
/// It is up to the user of the deserializer to implement the types to which data are deserialized, to decide whether object pools are used, etc.
/// </summary>
/// <typeparam name="TMetadata">Type of metadata to use for creation of metric data objects.</typeparam>
public interface IMetricBuilder<TMetadata>
where TMetadata : IMetricMetadata
{
/// <summary>
/// Sets the packet serialization version.
/// </summary>
/// <param name="serializationVersion">Serialization version.</param>
void SetSerializationVersion(ushort serializationVersion);
/// <summary>
/// Creates the custom object representing metric metadata. Note that the same metadata object can be shared
/// by many metric instances.
/// </summary>
/// <param name="metricNamespace">Namespace of the metric.</param>
/// <param name="metricName">Name of the metric.</param>
/// <param name="dimensionNames">Names of the metric dimensions.</param>
/// <returns>The metric metadata representing the given parameters.</returns>
TMetadata CreateMetadata(string metricNamespace, string metricName, IEnumerable<string> dimensionNames);
/// <summary>
/// Signals to the builder that it should get ready to start filling up data for a new metric.
/// </summary>
void BeginMetricCreation();
/// <summary>
/// Assigns metadata to the metric being built.
/// </summary>
/// <param name="metadata">Metric metadata.</param>
void AssignMetadata(TMetadata metadata);
/// <summary>
/// Assigns a monitoring account to the metric being built. This is only called if the aggregated metric data was serialized
/// with a version prior to v3. For v3 and above the monitoring account should be captured on the request itself, as part of the
/// URL path or as a query parameter.
/// </summary>
/// <param name="value">
/// Monitoring account associated with the metric.
/// </param>
/// <remarks>
/// If we ever make this interface public we should check if there are no more clients submitting data prior to v3 and remove
/// this method from the interface to avoid confusion from the users.
/// </remarks>
void AssignMonitoringAccount(string value);
/// <summary>
/// Assigns a namespace to the metric being built.
/// </summary>
/// <param name="value">Namespace associated with the metric.</param>
void AssignNamespace(string value);
/// <summary>
/// Assigns time (UTC) to the metric being built.
/// </summary>
/// <param name="value">Time of the metric.</param>
void AssignTimeUtc(DateTime value);
/// <summary>
/// Adds the value of a single dimension to the metric being built. The dimension values are passed in the same
/// order as the corresponding dimensions on the metric metadata.
/// </summary>
/// <param name="value">Value of one dimension of the metric.</param>
void AddDimensionValue(string value);
/// <summary>
/// Assigns the sampling types of the metric being built.
/// </summary>
/// <param name="value">Available sampling types of the metric.</param>
void AssignSamplingTypes(SamplingTypes value);
/// <summary>
/// Assigns the minimum value of the metric being built.
/// </summary>
/// <param name="value">Minimum value of the metric.</param>
void AssignMin(ulong value);
/// <summary>
/// Assigns the maximum value of the metric being built.
/// </summary>
/// <param name="value">Maximum value of the metric.</param>
void AssignMax(ulong value);
/// <summary>
/// Assigns the sum value of the metric being built.
/// </summary>
/// <param name="value">Sum value of the metric.</param>
void AssignSum(ulong value);
/// <summary>
/// Assigns the sum of square differences from mean value of the metric being built.
/// </summary>
/// <param name="value">Sum of square differences from mean of the value of the metric.</param>
void AssignSumOfSquareDiffFromMean(double value);
/// <summary>
/// Assigns the count (i.e.: how many times it was logged) value of the metric being built.
/// </summary>
/// <param name="value">Count value of the metric.</param>
void AssignCount(uint value);
/// <summary>
/// Assigns a histogram to the metric being built.
/// </summary>
/// <param name="value">Histogram of the metric.</param>
void AssignHistogram(IReadOnlyList<KeyValuePair<ulong, uint>> value);
/// <summary>
/// Assigns HyperLogLog sketches to the metric being built.
/// </summary>
/// <param name="reader">Stream containing the data.</param>
/// <param name="length">Length of data to read.</param>
void AssignHyperLogLogSketch(BinaryReader reader, int length);
/// <summary>
/// Signals to the builder that the creation of the current metric was completed.
/// </summary>
void EndMetricCreation();
}
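// Illustrative call-sequence sketch (added comment, not part of the original source), inferred from the
// member documentation above; the exact order in which the deserializer invokes the builder is an
// assumption. Metadata objects may be shared across many metrics.
//
//   builder.SetSerializationVersion(version);
//   var metadata = builder.CreateMetadata("MyNamespace", "MyMetric", new[] { "Datacenter", "Role" });
//   builder.BeginMetricCreation();
//   builder.AssignMetadata(metadata);
//   builder.AssignTimeUtc(timeUtc);
//   builder.AddDimensionValue("DC1");        // one call per dimension, in metadata order
//   builder.AddDimensionValue("FrontEnd");
//   builder.AssignSamplingTypes(samplingTypes);
//   builder.AssignMin(min);
//   builder.AssignMax(max);
//   builder.AssignSum(sum);
//   builder.AssignCount(count);
//   builder.EndMetricCreation();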
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="LocalMetricReader.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Metrics
{
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Etw;
using Logging;
/// <summary>
/// The class for consumption of locally aggregated metrics or local raw metrics.
/// </summary>
public sealed unsafe class LocalMetricReader : ILocalMetricReader
{
/// <summary>
/// Prefix added to ETW sessions that are using the configuration of a collector.
/// </summary>
private const string EtwSessionsPrefix = "LocalMetricReader-";
/// <summary>
/// Custom log id to be used in the log statements.
/// </summary>
private static readonly object LogId = Logger.CreateCustomLogId("LocalMetricReader");
/// <summary>
/// The aggregated metrics provider unique identifier.
/// </summary>
private static readonly Guid AggregatedMetricsProviderGuid = new Guid("{2F23A2A9-0DE7-4CB4-A778-FBDF5C1E7372}");
/// <summary>
/// The raw metrics etw provider unique identifier.
/// </summary>
private static readonly Guid RawMetricsEtwProviderGuid = new Guid("{EDC24920-E004-40F6-A8E1-0E6E48F39D84}");
/// <summary>
/// Gets or sets a value indicating whether [enable verbose logging].
/// </summary>
public bool EnableVerboseLogging { get; set; }
/// <summary>
/// Reads the local raw metrics.
/// </summary>
/// <param name="metricProducedAction">The action to execute when metric available.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <param name="etlFileName">The name of the etw file from when read data. If null, realtime session will be used.</param>
/// <returns>
/// An awaitable <see cref="Task" />.
/// </returns>
public Task ReadLocalRawMetricsAsync(
Action<ILocalRawMetric> metricProducedAction,
CancellationToken cancellationToken,
string etlFileName = null)
{
if (metricProducedAction == null)
{
throw new ArgumentNullException("metricProducedAction");
}
if (cancellationToken == null)
{
throw new ArgumentNullException("cancellationToken");
}
if (!this.EnableVerboseLogging)
{
Logger.SetMaxLogLevel(LoggerLevel.Error);
}
ActiveCollector activeCollector = null;
Task task = Task.Factory.StartNew(
() =>
{
try
{
Task.Factory.StartNew(
() =>
SetupRawMetricListener(
RawMetricsEtwProviderGuid,
etlFileName,
metricProducedAction,
cancellationToken,
out activeCollector),
TaskCreationOptions.LongRunning);
cancellationToken.WaitHandle.WaitOne();
}
finally
{
StopEtwSession(activeCollector);
}
},
TaskCreationOptions.LongRunning);
Console.CancelKeyPress += (sender, args) => StopEtwSession(activeCollector);
return task;
}
/// <summary>
/// Reads the locally aggregated metrics.
/// </summary>
/// <param name="metricProducedAction">The action to execute when metric available.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <param name="etlFileName">The name of the etw file from when read data. If null, realtime session will be used.</param>
/// <returns>
/// An awaitable <see cref="Task" />.
/// </returns>
public Task ReadLocalAggregatedMetricsAsync(
Action<ILocalAggregatedMetric> metricProducedAction,
CancellationToken cancellationToken,
string etlFileName = null)
{
if (metricProducedAction == null)
{
throw new ArgumentNullException("metricProducedAction");
}
if (cancellationToken == null)
{
throw new ArgumentNullException("cancellationToken");
}
if (!this.EnableVerboseLogging)
{
Logger.SetMaxLogLevel(LoggerLevel.Error);
}
ActiveCollector activeCollector = null;
Task task = Task.Factory.StartNew(
() =>
{
try
{
Task.Factory.StartNew(
() =>
SetupAggregatedMetricListener(
AggregatedMetricsProviderGuid,
etlFileName,
metricProducedAction,
cancellationToken,
out activeCollector),
TaskCreationOptions.LongRunning);
cancellationToken.WaitHandle.WaitOne();
}
finally
{
StopEtwSession(activeCollector);
}
},
TaskCreationOptions.LongRunning);
Console.CancelKeyPress += (sender, args) => StopEtwSession(activeCollector);
return task;
}
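// Illustrative usage sketch (added comment, not part of the original source). Assumes an elevated
// console host (starting an ETW session typically requires administrator rights); the shape of
// ILocalAggregatedMetric is not shown here, so only ToString() output is printed.
//
//   var reader = new LocalMetricReader();
//   using (var cts = new CancellationTokenSource(TimeSpan.FromMinutes(5)))
//   {
//       await reader.ReadLocalAggregatedMetricsAsync(
//           metric => Console.WriteLine(metric),
//           cts.Token).ConfigureAwait(false);
//   }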
/// <summary>
/// Stops the etw session.
/// </summary>
/// <param name="activeCollector">The active collector.</param>
private static void StopEtwSession(ActiveCollector activeCollector)
{
if (activeCollector != null)
{
ActiveCollector.StopCollector(activeCollector.Name);
}
}
/// <summary>
/// Setups the raw metric listener.
/// </summary>
/// <param name="providerGuid">The provider unique identifier.</param>
/// <param name="etlFileName">The name of the etw file from when read data. If null, realtime session will be used.</param>
/// <param name="metricProducedAction">The when metric available.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <param name="activeCollector">The active collector.</param>
private static void SetupRawMetricListener(
Guid providerGuid,
string etlFileName,
Action<ILocalRawMetric> metricProducedAction,
CancellationToken cancellationToken,
out ActiveCollector activeCollector)
{
var providers = new Dictionary<Guid, ProviderConfiguration>
{
{
providerGuid,
new ProviderConfiguration(providerGuid, EtwTraceLevel.Verbose, 0, 0)
}
};
var etwSessionConfig = new CollectorConfiguration(EtwSessionsPrefix + "raw-")
{
SessionType = SessionType.Realtime,
Providers = providers
};
activeCollector = new ActiveCollector(etwSessionConfig.Name);
activeCollector.StartCollector(etwSessionConfig);
RawListener etwListener = null;
try
{
etwListener = string.IsNullOrWhiteSpace(etlFileName)
? CreateRealTimeListener(providerGuid, etwSessionConfig.Name, metricProducedAction, cancellationToken)
: CreateFileListener(providerGuid, etlFileName, metricProducedAction, cancellationToken);
// TODO: Better to check providers periodically and retry several times.
if (!ActiveCollector.TryUpdateProviders(etwSessionConfig))
{
Logger.Log(LoggerLevel.Error, LogId, "Main", "Failed to update ETW providers. Terminating.");
return;
}
try
{
etwListener.Process();
}
finally
{
Logger.Log(
cancellationToken.IsCancellationRequested ? LoggerLevel.Info : LoggerLevel.Error,
LogId,
"SetupEtwDataPipeline",
"ETW Thread terminated unexpectedly, typically indicates that the ETW session was stopped.");
}
}
finally
{
if (etwListener != null)
{
etwListener.Dispose();
}
}
}
/// <summary>
/// Setups the aggregated metric listener.
/// </summary>
/// <param name="providerGuid">The provider unique identifier.</param>
/// <param name="etlFileName">The name of the etw file from when read data. If null, realtime session will be used.</param>
/// <param name="metricProducedAction">The when metric available.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <param name="activeCollector">The active collector.</param>
private static void SetupAggregatedMetricListener(
Guid providerGuid,
string etlFileName,
Action<ILocalAggregatedMetric> metricProducedAction,
CancellationToken cancellationToken,
out ActiveCollector activeCollector)
{
var providers = new Dictionary<Guid, ProviderConfiguration>
{
{
providerGuid,
new ProviderConfiguration(providerGuid, EtwTraceLevel.Verbose, 0, 0)
}
};
var etwSessionConfig = new CollectorConfiguration(EtwSessionsPrefix + "aggregated-")
{
SessionType = SessionType.Realtime,
Providers = providers
};
activeCollector = new ActiveCollector(etwSessionConfig.Name);
activeCollector.StartCollector(etwSessionConfig);
RawListener etwListener = null;
try
{
etwListener = string.IsNullOrWhiteSpace(etlFileName)
? CreateRealTimeListener(providerGuid, etwSessionConfig.Name, metricProducedAction, 1, cancellationToken)
: CreateFileListener(providerGuid, etlFileName, metricProducedAction, 1, cancellationToken);
// TODO: Better to check providers periodically and retry several times.
if (!ActiveCollector.TryUpdateProviders(etwSessionConfig))
{
Logger.Log(LoggerLevel.Error, LogId, "Main", "Failed to update ETW providers. Terminating.");
return;
}
try
{
etwListener.Process();
}
finally
{
Logger.Log(
cancellationToken.IsCancellationRequested ? LoggerLevel.Info : LoggerLevel.Error,
LogId,
"SetupEtwDataPipeline",
"ETW Thread terminated unexpectedly, typically indicates that the ETW session was stopped.");
}
}
finally
{
if (etwListener != null)
{
etwListener.Dispose();
}
}
}
/// <summary>
/// Creates the real time listener.
/// </summary>
/// <param name="providerGuid">The provider unique identifier.</param>
/// <param name="etlSessionConfigName">Name of the etl session configuration.</param>
/// <param name="metricProducedAction">The metric produced action.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>An instance of <see cref="RawListener"/>.</returns>
private static RawListener CreateRealTimeListener(
Guid providerGuid,
string etlSessionConfigName,
Action<ILocalRawMetric> metricProducedAction,
CancellationToken cancellationToken)
{
return RawListener.CreateRealTimeListener(
etlSessionConfigName,
eventRecord =>
{
if (eventRecord->EventHeader.ProviderId == providerGuid)
{
metricProducedAction(LocalRawMetric.ConvertToMetricData(eventRecord));
}
},
eventTraceLog =>
{
Logger.Log(
LoggerLevel.Info,
LogId,
"CreateRealTimeListener",
"LocalRawMetric, cancelled = {0}",
cancellationToken.IsCancellationRequested);
return !cancellationToken.IsCancellationRequested;
});
}
/// <summary>
/// Creates the file listener.
/// </summary>
/// <param name="providerGuid">The provider unique identifier.</param>
/// <param name="etlFileName">Name of the etl file.</param>
/// <param name="metricProducedAction">The metric produced action.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>An instance of <see cref="RawListener"/>.</returns>
private static RawListener CreateFileListener(
Guid providerGuid,
string etlFileName,
Action<ILocalRawMetric> metricProducedAction,
CancellationToken cancellationToken)
{
return RawListener.CreateEtlFileListener(
new[] { etlFileName },
eventRecord =>
{
if (eventRecord->EventHeader.ProviderId == providerGuid)
{
metricProducedAction(LocalRawMetric.ConvertToMetricData(eventRecord));
}
},
eventTraceLog =>
{
Logger.Log(
LoggerLevel.Info,
LogId,
"CreateFileListener",
"LocalRawMetric, cancelled = {0}",
cancellationToken.IsCancellationRequested);
return !cancellationToken.IsCancellationRequested;
});
}
/// <summary>
/// Creates the real time listener.
/// </summary>
/// <param name="providerGuid">The provider unique identifier.</param>
/// <param name="etlSessionConfigName">Name of the etl session configuration.</param>
/// <param name="metricProducedAction">The metric produced action.</param>
/// <param name="eventIdFilter">The event identifier filter.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>A <see cref="RawListener"/> instance.</returns>
private static RawListener CreateRealTimeListener(
Guid providerGuid,
string etlSessionConfigName,
Action<ILocalAggregatedMetric> metricProducedAction,
int eventIdFilter,
CancellationToken cancellationToken)
{
return RawListener.CreateRealTimeListener(
etlSessionConfigName,
eventRecord =>
{
if (eventRecord->EventHeader.ProviderId == providerGuid && eventRecord->EventHeader.Id == eventIdFilter)
{
metricProducedAction(LocalAggregatedMetric.ConvertToMetricData(eventRecord));
}
},
eventTraceLog =>
{
Logger.Log(
LoggerLevel.Info,
LogId,
"CreateRealTimeListener",
"LocalAggregatedMetric, cancelled = {0}",
cancellationToken.IsCancellationRequested);
return !cancellationToken.IsCancellationRequested;
});
}
/// <summary>
/// Creates the file listener.
/// </summary>
/// <param name="providerGuid">The provider unique identifier.</param>
/// <param name="etlFileName">Name of the etl file.</param>
/// <param name="metricProducedAction">The metric produced action.</param>
/// <param name="eventIdFilter">The event identifier filter.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>A <see cref="RawListener"/> instance.</returns>
private static RawListener CreateFileListener(
Guid providerGuid,
string etlFileName,
Action<ILocalAggregatedMetric> metricProducedAction,
int eventIdFilter,
CancellationToken cancellationToken)
{
return RawListener.CreateEtlFileListener(
new[] { etlFileName },
eventRecord =>
{
if (eventRecord->EventHeader.ProviderId == providerGuid && eventRecord->EventHeader.Id == eventIdFilter)
{
metricProducedAction(LocalAggregatedMetric.ConvertToMetricData(eventRecord));
}
},
eventTraceLog =>
{
Logger.Log(
LoggerLevel.Info,
LogId,
"CreateFileListener",
"LocalAggregatedMetric, cancelled = {0}",
cancellationToken.IsCancellationRequested);
return !cancellationToken.IsCancellationRequested;
});
}
}
}<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="FilteringConfiguration.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Configuration
{
using Newtonsoft.Json;
/// <summary>
/// Determines whether filtering is enabled or disabled for this preaggregate.
/// </summary>
public sealed class FilteringConfiguration : IFilteringConfiguration
{
/// <summary>
/// IFilteringConfiguration where filtering is enabled.
/// </summary>
public static readonly IFilteringConfiguration FilteringEnabled = new FilteringConfiguration(true);
/// <summary>
/// IFilteringConfiguration where filtering is disabled.
/// </summary>
public static readonly IFilteringConfiguration FilteringDisabled = new FilteringConfiguration(false);
/// <summary>
/// Initializes a new instance of the <see cref="FilteringConfiguration"/> class.
/// </summary>
/// <param name="enabled">Whether or not the feature is enabled.</param>
[JsonConstructor]
internal FilteringConfiguration(bool enabled)
{
this.Enabled = enabled;
}
/// <summary>
/// Determines if the filtering is enabled for this pre-aggregate based on configured pre-aggregate filters.
/// </summary>
public bool Enabled { get; }
}
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="StampLocator.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.ThirdParty
{
using System;
using System.Collections.Generic;
using System.IO;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Cloud.Metrics.Client.Utility;
using Newtonsoft.Json;
/// <summary>
/// Helper class to locate the stamp endpoint for Azure external customers' 3rd party accounts.
/// </summary>
public sealed class StampLocator : IStampLocator
{
private const string StampHostNameSuffix = ".metrics.nsatc.net";
private const string FileNameForRegionStampMap = "RegionToGenevaMetricsStampMap.json";
private static readonly Uri ThirdPartyRegionStampMapUrl = new Uri("https://stamplocator.metrics.nsatc.net/public/thirdPartyRegionStampMap");
private readonly Uri stampLocatorUrl;
private readonly string fullFilePathForRegionStampMap;
private readonly HttpClient httpClient;
private readonly TimeSpan refreshInternal = TimeSpan.FromHours(1);
private readonly ActivityReporter activityReporter;
private Timer timerToRefreshRegionStampMap;
private Dictionary<string, string> regionStampMap;
/// <summary>
/// Initializes a new instance of the <see cref="StampLocator" /> class.
/// </summary>
/// <param name="fullFilePathForRegionStampMap">The full file path for region stamp map.</param>
/// <param name="regionStampMap">The region stamp map.</param>
/// <param name="httpClient">The HTTP client.</param>
/// <param name="stampLocatorUrl">The stamp locator URL to retrieve the region stamp map.</param>
/// <param name="activityReporter">The activity reporter.</param>
/// <exception cref="MetricsClientException">The region stamp map failed to initialize or is empty.</exception>
private StampLocator(string fullFilePathForRegionStampMap, Dictionary<string, string> regionStampMap, HttpClient httpClient, Uri stampLocatorUrl, ActivityReporter activityReporter)
{
this.fullFilePathForRegionStampMap = fullFilePathForRegionStampMap;
this.regionStampMap = regionStampMap;
this.httpClient = httpClient;
this.stampLocatorUrl = stampLocatorUrl;
this.activityReporter = activityReporter;
}
/// <summary>
/// The callback to report StampLocator activities. Consumers are expected to emit metrics and logs.
/// </summary>
/// <param name="activity">The activity.</param>
/// <param name="isError">if set to <c>true</c>, the activity is an error.</param>
/// <param name="detail">The detail about the activity.</param>
public delegate void ActivityReporter(StampLocatorActivity activity, bool isError, string detail);
/// <summary>
/// Creates an instance of <see cref="StampLocator"/> asynchronously.
/// </summary>
/// <param name="folderToCacheRegionStampMap">The folder to cache region stamp map.</param>
/// <param name="activityReporter">The activity reporter. The string argument contains the error detail when the activity results in an error; otherwise it is null.</param>
/// <returns>An instance of <see cref="StampLocator"/>.</returns>
public static Task<IStampLocator> CreateInstanceAsync(string folderToCacheRegionStampMap, ActivityReporter activityReporter)
{
if (string.IsNullOrWhiteSpace(folderToCacheRegionStampMap))
{
throw new ArgumentException("The argument is null or empty", nameof(folderToCacheRegionStampMap));
}
if (activityReporter == null)
{
throw new ArgumentNullException(nameof(activityReporter));
}
return CreateInstanceAsync(
folderToCacheRegionStampMap,
HttpClientHelper.CreateHttpClient(ConnectionInfo.DefaultTimeout),
ThirdPartyRegionStampMapUrl,
activityReporter);
}
/// <summary>
/// Gets the stamp endpoint.
/// </summary>
/// <param name="subscriptionId">The subscription identifier.</param>
/// <param name="azureRegion">The azure region.</param>
/// <returns>The stamp endpoint for the given account identified by <paramref name="subscriptionId"/>.</returns>
public Uri GetStampEndpoint(string subscriptionId, string azureRegion)
{
var stampName = this.GetStampName(subscriptionId, azureRegion);
return new Uri($"https://{stampName}{StampHostNameSuffix}");
}
/// <summary>
/// Gets the stamp name.
/// </summary>
/// <param name="subscriptionId">The subscription identifier.</param>
/// <param name="azureRegion">The azure region.</param>
/// <returns>The stamp name for the given account identified by <paramref name="subscriptionId"/>.</returns>
public string GetStampName(string subscriptionId, string azureRegion)
{
if (this.regionStampMap.ContainsKey(azureRegion))
{
return this.regionStampMap[azureRegion];
}
throw new MetricsClientException($"There is no MDM stamp for region [{azureRegion}]. Available regions are [{string.Join(",", this.regionStampMap.Keys)}].");
}
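// Illustrative usage sketch (added comment, not part of the original source). The cache folder,
// subscription id, and region values are placeholders.
//
//   IStampLocator locator = await StampLocator.CreateInstanceAsync(
//       @"C:\MetricsCache",
//       (activity, isError, detail) => Console.WriteLine($"{activity} (error={isError}): {detail}")).ConfigureAwait(false);
//   Uri endpoint = locator.GetStampEndpoint("00000000-0000-0000-0000-000000000000", "East US");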
/// <summary>
/// Creates the instance asynchronously.
/// </summary>
/// <param name="folderToCacheRegionStampMap">The folder to cache region stamp map.</param>
/// <param name="httpClient">The HTTP client.</param>
/// <param name="stampLocatorUrl">The stamp locator URL.</param>
/// <param name="activityReporter">The activity reporter.</param>
/// <returns>
/// An instance of <see cref="StampLocator" />.
/// </returns>
internal static async Task<IStampLocator> CreateInstanceAsync(
string folderToCacheRegionStampMap,
HttpClient httpClient,
Uri stampLocatorUrl,
ActivityReporter activityReporter)
{
string fullFilePathForRegionStampMap = null;
Dictionary<string, string> regionStampMap = null;
if (!string.IsNullOrWhiteSpace(folderToCacheRegionStampMap) && Directory.Exists(folderToCacheRegionStampMap))
{
fullFilePathForRegionStampMap = Path.Combine(folderToCacheRegionStampMap, FileNameForRegionStampMap);
if (File.Exists(fullFilePathForRegionStampMap))
{
string fileContent = null;
activityReporter(StampLocatorActivity.StartToLoadRegionStampMapFromLocalFile, false, $"File name:{fullFilePathForRegionStampMap}.");
try
{
fileContent = File.ReadAllText(fullFilePathForRegionStampMap);
regionStampMap = CreateNewRegionStampMap(fileContent);
activityReporter(StampLocatorActivity.FinishedLoadingRegionStampMapFromLocalFile, false, $"fileContent:{fileContent}, regionStampMap:{JsonConvert.SerializeObject(regionStampMap)}.");
}
catch (Exception e)
{
var errorMessage =
$"Failed to create the region stamp map from local file. FilePath:{fullFilePathForRegionStampMap}, FileContent:{fileContent}, Exception:{e}.";
activityReporter(StampLocatorActivity.FailedToLoadRegionStampMapFromLocalFile, true, errorMessage);
}
}
else
{
activityReporter(StampLocatorActivity.FailedToLoadRegionStampMapFromLocalFile, true, $"File {fullFilePathForRegionStampMap} doesn't exist.");
}
}
else
{
activityReporter(StampLocatorActivity.FailedToLoadRegionStampMapFromLocalFile, true, $"Folder {folderToCacheRegionStampMap} doesn't exist.");
}
var instance = new StampLocator(fullFilePathForRegionStampMap, regionStampMap, httpClient, stampLocatorUrl, activityReporter);
await instance.RefreshNoThrow().ConfigureAwait(false);
if (instance.regionStampMap == null || instance.regionStampMap.Count == 0)
{
throw new MetricsClientException("The region stamp map failed to initialize or is empty.");
}
instance.timerToRefreshRegionStampMap = new Timer(async state => await instance.RefreshNoThrow().ConfigureAwait(false), null, instance.refreshInternal, instance.refreshInternal);
return instance;
}
private static Dictionary<string, string> CreateNewRegionStampMap(string response)
{
var dictionary = JsonConvert.DeserializeObject<Dictionary<string, string>>(response);
var newRegionStampMap = new Dictionary<string, string>(2 * dictionary.Count, StringComparer.OrdinalIgnoreCase);
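// Each region is added twice, once as-is and once with spaces removed, and the comparer is
// case-insensitive, so lookups such as "East US", "east us", and "EastUS" all resolve to the same stamp.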
foreach (var kvp in dictionary)
{
newRegionStampMap.Add(kvp.Key, kvp.Value);
newRegionStampMap.Add(kvp.Key.Replace(" ", string.Empty), kvp.Value);
}
return newRegionStampMap;
}
private async Task<bool> RefreshNoThrow()
{
this.activityReporter(StampLocatorActivity.StartToRefrehRegionStampMap, false, $"Url:{this.stampLocatorUrl}.");
string response = null;
try
{
var responseMessage = await HttpClientHelper.GetResponse(this.stampLocatorUrl, HttpMethod.Get, this.httpClient, "MA", "RefreshMap").ConfigureAwait(false);
response = responseMessage.Item1;
this.regionStampMap = CreateNewRegionStampMap(response);
this.activityReporter(StampLocatorActivity.FinishedRefreshingRegionStampMap, false, $"regionStampMap:{JsonConvert.SerializeObject(this.regionStampMap)}.");
}
catch (Exception e)
{
string errorMessage = $"Failed to refresh the region stamp map. Response:{response}, Url:{this.stampLocatorUrl}, Exception:{e}.";
this.activityReporter(StampLocatorActivity.FailedToRefrehRegionStampMap, true, errorMessage);
}
if (this.fullFilePathForRegionStampMap != null && response != null)
{
try
{
this.activityReporter(StampLocatorActivity.StartToWriteRegionStampMapToLocalFile, false, $"File name:{this.fullFilePathForRegionStampMap}.");
File.WriteAllText(this.fullFilePathForRegionStampMap, response);
this.activityReporter(StampLocatorActivity.FinishedWritingRegionStampMapToLocalFile, false, string.Empty);
}
catch (Exception e)
{
string errorMessage = $"Writing to {this.fullFilePathForRegionStampMap} failed with {e}.";
this.activityReporter(StampLocatorActivity.FailedToWriteRegionStampMapToLocalFile, true, errorMessage);
}
}
return true;
}
}
}
<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="CompositeExpression.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Configuration
{
using System;
/// <summary>
/// Expression to be evaluated to create sampling types for the composite metric.
/// </summary>
public sealed class CompositeExpression : IExpression
{
private string name;
private string expression;
/// <summary>
/// Initializes a new instance of the <see cref="CompositeExpression"/> class.
/// </summary>
/// <param name="name">The name.</param>
/// <param name="expression">The expression.</param>
public CompositeExpression(string name, string expression)
{
if (string.IsNullOrWhiteSpace(name))
{
throw new ArgumentNullException(nameof(name));
}
if (string.IsNullOrWhiteSpace(expression))
{
throw new ArgumentNullException(nameof(expression));
}
this.name = name;
this.expression = expression;
}
/// <summary>
/// Gets or sets the name of the expression.
/// </summary>
public string Name
{
get
{
return this.name;
}
set
{
if (string.IsNullOrWhiteSpace(value))
{
throw new ArgumentNullException(nameof(value));
}
this.name = value;
}
}
/// <summary>
/// Gets or sets the expression.
/// </summary>
public string Expression
{
get
{
return this.expression;
}
set
{
if (string.IsNullOrWhiteSpace(value))
{
throw new ArgumentNullException(nameof(value));
}
this.expression = value;
}
}
}
}
<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="IMonitorHealthStatus.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Online.Metrics.Serialization.Monitor
{
using System;
/// <summary>
/// The interface representing the monitor health status.
/// </summary>
public interface IMonitorHealthStatus
{
/// <summary>
/// Gets a value indicating the healthy status of the last report.
/// </summary>
bool Healthy { get; }
/// <summary>
/// Gets the last time when monitor reported a status
/// </summary>
DateTimeOffset? TimeStamp { get; }
/// <summary>
/// Gets the message for the last monitor report
/// </summary>
string Message { get; }
}
}<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="IPermission.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Configuration
{
using System;
/// <summary>
/// This object represents one entity and their level of access to an MDM Account.
/// </summary>
[Obsolete]
public interface IPermission
{
/// <summary>
/// The identity to grant permission.
/// </summary>
string Identity { get; }
/// <summary>
/// The level of access to be granted to this identity.
/// </summary>
Role Role { get; set; }
}
}
<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="DiagnosticHeartbeatReader.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.MetricsExtension
{
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Logging;
using Metrics.Etw;
/// <summary>
/// The class for consumption of ME diagnostic heart beats.
/// </summary>
public sealed unsafe class DiagnosticHeartbeatReader
{
/// <summary>
/// Prefix added to ETW sessions that are using the configuration of a collector.
/// </summary>
private const string EtwSessionsPrefix = "DiagnosticHeartbeatReader-";
/// <summary>
/// Custom log id to be used in the log statements.
/// </summary>
private static readonly object LogId = Logger.CreateCustomLogId("DiagnosticHeartbeatReader");
/// <summary>
/// The provider unique identifier.
/// </summary>
private static readonly Guid ProviderGuid = new Guid("{2F23A2A9-0DE7-4CB4-A778-FBDF5C1E7372}");
/// <summary>
/// Gets or sets a value indicating whether [enable verbose logging].
/// </summary>
public bool EnableVerboseLogging { get; set; }
/// <summary>
/// Reads the diagnostic heartbeats.
/// </summary>
/// <param name="heartbeatAction">The heartbeat action.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>The task listening for ETW events.</returns>
public Task ReadDiagnosticHeartbeatsAsync(
Action<IDiagnosticHeartbeat> heartbeatAction,
CancellationToken cancellationToken)
{
if (heartbeatAction == null)
{
throw new ArgumentNullException(nameof(heartbeatAction));
}
if (cancellationToken == null)
{
throw new ArgumentNullException(nameof(cancellationToken));
}
if (!this.EnableVerboseLogging)
{
Logger.SetMaxLogLevel(LoggerLevel.Error);
}
ActiveCollector activeCollector = null;
Task task = Task.Factory.StartNew(
() =>
{
try
{
Task.Factory.StartNew(
() =>
SetupListener(
ProviderGuid,
heartbeatAction,
cancellationToken,
out activeCollector),
TaskCreationOptions.LongRunning);
cancellationToken.WaitHandle.WaitOne();
}
finally
{
StopEtwSession(activeCollector);
}
},
TaskCreationOptions.LongRunning);
Console.CancelKeyPress += (sender, args) => StopEtwSession(activeCollector);
return task;
}
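// Illustrative usage sketch (added comment, not part of the original source). Assumes an elevated
// console host; the shape of IDiagnosticHeartbeat is not shown here, so only ToString() output is printed.
//
//   var heartbeatReader = new DiagnosticHeartbeatReader();
//   using (var cts = new CancellationTokenSource())
//   {
//       await heartbeatReader.ReadDiagnosticHeartbeatsAsync(
//           heartbeat => Console.WriteLine(heartbeat),
//           cts.Token).ConfigureAwait(false);
//   }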
/// <summary>
/// Stops the etw session.
/// </summary>
/// <param name="activeCollector">The active collector.</param>
private static void StopEtwSession(ActiveCollector activeCollector)
{
if (activeCollector != null)
{
ActiveCollector.StopCollector(activeCollector.Name);
}
}
/// <summary>
/// Setups the listener.
/// </summary>
/// <param name="providerGuid">The provider unique identifier.</param>
/// <param name="action">The action.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <param name="activeCollector">The active collector.</param>
private static void SetupListener(
Guid providerGuid,
Action<IDiagnosticHeartbeat> action,
CancellationToken cancellationToken,
out ActiveCollector activeCollector)
{
var providers = new Dictionary<Guid, ProviderConfiguration>
{
{
providerGuid,
new ProviderConfiguration(providerGuid, EtwTraceLevel.Verbose, 0, 0)
}
};
var etwSessionConfig = new CollectorConfiguration(EtwSessionsPrefix + "-")
{
SessionType = SessionType.Realtime,
Providers = providers
};
activeCollector = new ActiveCollector(etwSessionConfig.Name);
activeCollector.StartCollector(etwSessionConfig);
RawListener etwListener = null;
try
{
etwListener = CreateRealTimeListener(providerGuid, etwSessionConfig.Name, action, 2, cancellationToken);
// TODO: Better to check providers periodically and retry several times.
if (!ActiveCollector.TryUpdateProviders(etwSessionConfig))
{
Logger.Log(LoggerLevel.Error, LogId, "Main", "Failed to update ETW providers. Terminating.");
return;
}
try
{
etwListener.Process();
}
finally
{
Logger.Log(
cancellationToken.IsCancellationRequested ? LoggerLevel.Info : LoggerLevel.Error,
LogId,
"SetupEtwDataPipeline",
"ETW Thread terminated unexpectedly, typically indicates that the ETW session was stopped.");
}
}
finally
{
etwListener?.Dispose();
}
}
/// <summary>
/// Creates the real time listener.
/// </summary>
/// <param name="providerGuid">The provider unique identifier.</param>
/// <param name="etlSessionConfigName">Name of the etl session configuration.</param>
/// <param name="action">The action.</param>
/// <param name="eventIdFilter">Event Id filter to call the action</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>The ETW listener</returns>
private static RawListener CreateRealTimeListener(
Guid providerGuid,
string etlSessionConfigName,
Action<IDiagnosticHeartbeat> action,
int eventIdFilter,
CancellationToken cancellationToken)
{
return RawListener.CreateRealTimeListener(
etlSessionConfigName,
eventRecord =>
{
if (eventRecord->EventHeader.ProviderId == providerGuid && eventRecord->EventHeader.Id == eventIdFilter)
{
action(DiagnosticHeartbeat.FromEtwEvent(eventRecord));
}
},
eventTraceLog =>
{
Logger.Log(
LoggerLevel.Info,
LogId,
"CreateRealTimeListener",
"DiagnosticHeartbeat, cancelled = {0}",
cancellationToken.IsCancellationRequested);
return !cancellationToken.IsCancellationRequested;
});
}
}
}<file_sep>using k8s;
using k8s.Models;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
namespace K8SClient
{
public class K8sOperations
{
public static async Task<List<Extensionsv1beta1Ingress>> ListAllIngressAsync(IKubernetes client, string ns, CancellationToken cancellationToken = default)
{
var results = new List<Extensionsv1beta1Ingress>();
string continueParameter = null;
do
{
var resp = await client.ListNamespacedIngressAsync(ns, continueParameter: continueParameter, limit: 100, cancellationToken: cancellationToken);
continueParameter = resp.Metadata.ContinueProperty;
results.AddRange(resp.Items);
} while (continueParameter != null);
return results;
}
public static async Task<List<V1Service>> ListAllServiceAsync(IKubernetes client, string ns, CancellationToken cancellationToken = default)
{
var results = new List<V1Service>();
string continueParameter = null;
do
{
var resp = await client.ListNamespacedServiceAsync(ns, continueParameter: continueParameter, limit: 100, cancellationToken: cancellationToken);
continueParameter = resp.Metadata.ContinueProperty;
results.AddRange(resp.Items);
} while (continueParameter != null);
return results;
}
public static async Task<List<V1Deployment>> ListAllDeployments(IKubernetes client, string ns, CancellationToken cancellationToken = default)
{
var results = new List<V1Deployment>();
string continueParameter = null;
do
{
var resp = await client.ListNamespacedDeploymentAsync(ns, continueParameter: continueParameter, limit: 100, cancellationToken: cancellationToken);
continueParameter = resp.Metadata.ContinueProperty;
results.AddRange(resp.Items);
} while (continueParameter != null);
return results;
}
public static async Task<List<V1ReplicaSet>> ListReplicaSet(IKubernetes client, string ns, CancellationToken cancellationToken = default)
{
var results = new List<V1ReplicaSet>();
string continueParameter = null;
do
{
var resp = await client.ListNamespacedReplicaSetAsync(ns, continueParameter: continueParameter, limit: 100, cancellationToken: cancellationToken);
continueParameter = resp.Metadata.ContinueProperty;
results.AddRange(resp.Items);
} while (continueParameter != null);
return results;
}
public static async Task<List<V1Pod>> ListPodsAsync(IKubernetes client, string ns, CancellationToken cancellationToken = default)
{
var list = new List<V1Pod>();
string continueParameter = null;
do
{
var resp = await client.ListNamespacedPodAsync(ns, continueParameter: continueParameter, limit: 100, cancellationToken: cancellationToken);
continueParameter = resp.Metadata.ContinueProperty;
list.AddRange(resp.Items);
} while (continueParameter != null);
return list;
}
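// --- Illustrative usage sketch (editor's addition, not part of the original file) ---
// Shows how the paginated helpers above can be combined; the method name below is
// hypothetical and only demonstrates the calling pattern.
public static async Task PrintPodAndServiceCountsAsync(IKubernetes client, string ns, CancellationToken cancellationToken = default)
{
var pods = await ListPodsAsync(client, ns, cancellationToken);
var services = await ListAllServiceAsync(client, ns, cancellationToken);
System.Console.WriteLine($"Namespace {ns}: {pods.Count} pods, {services.Count} services.");
}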
}
}
<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="IFilteringConfiguration.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Configuration
{
/// <summary>
/// Determines if the filtering is enabled for this pre-aggregate based on configured pre-aggregate filters.
/// </summary>
public interface IFilteringConfiguration
{
/// <summary>
/// Gets a value indicating whether filtering is enabled for this pre-aggregate, based on the configured pre-aggregate filters.
/// </summary>
bool Enabled { get; }
}
}<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="IPercentileConfiguration.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Configuration
{
/// <summary>
/// Determines whether the percentile sampling types are available for this preaggregate.
/// </summary>
public interface IPercentileConfiguration
{
/// <summary>
/// Gets a value indicating whether the percentile sampling types are enabled.
/// </summary>
bool Enabled { get; }
}
}<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="IBlobSegment.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Metrics.Services.Common.BlobSegment
{
/// <summary>
/// Blob segment interface
/// </summary>
public interface IBlobSegment : IPoolTrackable
{
/// <summary>
/// Gets the data that is stored in the segment.
/// </summary>
byte[] Data { get; }
/// <summary>
/// Gets or sets the reference to the next segment in the blob (null for the last segment of the blob).
/// </summary>
IBlobSegment Next { get; set; }
/// <summary>
/// Initializes an instance of BlobSegment class.
/// </summary>
/// <param name="nextSegment">The next segment in the linked list.</param>
/// <param name="purpose">Purpose of the segment usage. Useful for debugging memory leaks.</param>
void Initialize(
IBlobSegment nextSegment,
SegmentPurpose purpose);
}
}<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="CompositeMetricSource.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Configuration
{
using System;
/// <summary>
/// Identity and display name of the metric that will be used in the composite.
/// </summary>
public class CompositeMetricSource
{
private string displayName;
private string monitoringAccount;
private string metricNamespace;
private string metric;
/// <summary>
/// Initializes a new instance of the <see cref="CompositeMetricSource"/> class.
/// </summary>
/// <param name="displayName">The display name.</param>
/// <param name="monitoringAccount">The monitoring account.</param>
/// <param name="metricNamespace">The metric namespace.</param>
/// <param name="metric">The metric.</param>
public CompositeMetricSource(string displayName, string monitoringAccount, string metricNamespace, string metric)
{
if (string.IsNullOrWhiteSpace(displayName))
{
throw new ArgumentNullException(nameof(displayName));
}
if (string.IsNullOrWhiteSpace(monitoringAccount))
{
throw new ArgumentNullException(nameof(monitoringAccount));
}
if (string.IsNullOrWhiteSpace(metricNamespace))
{
throw new ArgumentNullException(nameof(metricNamespace));
}
if (string.IsNullOrWhiteSpace(metric))
{
throw new ArgumentNullException(nameof(metric));
}
this.displayName = displayName;
this.monitoringAccount = monitoringAccount;
this.metricNamespace = metricNamespace;
this.metric = metric;
}
/// <summary>
/// Gets or sets the display name.
/// </summary>
public string DisplayName
{
get
{
return this.displayName;
}
set
{
if (string.IsNullOrWhiteSpace(value))
{
throw new ArgumentNullException(nameof(value));
}
this.displayName = value;
}
}
/// <summary>
/// Gets or sets the monitoring account.
/// </summary>
public string MonitoringAccount
{
get
{
return this.monitoringAccount;
}
set
{
if (string.IsNullOrWhiteSpace(value))
{
throw new ArgumentNullException(nameof(value));
}
this.monitoringAccount = value;
}
}
/// <summary>
/// Gets or sets the metric namespace.
/// </summary>
public string MetricNamespace
{
get
{
return this.metricNamespace;
}
set
{
if (string.IsNullOrWhiteSpace(value))
{
throw new ArgumentNullException(nameof(value));
}
this.metricNamespace = value;
}
}
/// <summary>
/// Gets or sets the metric.
/// </summary>
public string Metric
{
get
{
return this.metric;
}
set
{
if (string.IsNullOrWhiteSpace(value))
{
throw new ArgumentNullException(nameof(value));
}
this.metric = value;
}
}
}
}<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="MonitorHealthStatus.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client
{
using System;
using Microsoft.Online.Metrics.Serialization.Monitor;
/// <summary>
/// The class representing the monitor status.
/// </summary>
internal sealed class MonitorHealthStatus : IMonitorHealthStatus
{
/// <summary>
/// Initializes a new instance of the <see cref="MonitorHealthStatus"/> class.
/// </summary>
/// <param name="healthy">if set to <c>true</c> [healthy].</param>
/// <param name="timeStamp">The time stamp of the last monitor report.</param>
/// <param name="message">The message for the last monitor report.</param>
public MonitorHealthStatus(bool healthy, DateTimeOffset? timeStamp, string message)
{
this.Healthy = healthy;
this.TimeStamp = timeStamp;
this.Message = message;
}
/// <summary>
/// Gets a value indicating the healthy status of the last report.
/// </summary>
public bool Healthy { get; private set; }
/// <summary>
/// Gets the last time when monitor reported a status
/// </summary>
public DateTimeOffset? TimeStamp { get; private set; }
/// <summary>
/// Gets the message for the last monitor report
/// </summary>
public string Message { get; private set; }
}
}<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="CrcCheckFailedSerializationException.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//
// <author email="selavrin">
// <NAME>
// </author>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Online.Metrics.Serialization
{
using System;
/// <summary>
/// An exception used to report about situation when CRC check fails.
/// </summary>
[Serializable]
public sealed class CrcCheckFailedSerializationException : MetricSerializationException
{
/// <summary>
/// Initializes a new instance of the <see cref="CrcCheckFailedSerializationException"/> class.
/// </summary>
/// <param name="message">A message explaining the cause for exception situation.</param>
public CrcCheckFailedSerializationException(string message)
: base(message, null)
{
}
}
}
<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="Extensions.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client
{
using System;
using System.Collections.Generic;
using Microsoft.Online.Metrics.Serialization.Configuration;
/// <summary>
/// The extension class.
/// </summary>
public static class Extensions
{
/// <summary>
/// Gets the index of the <paramref name="dimensionName"/> in dimension combination list.
/// </summary>
/// <param name="definitions">The time series definitions.</param>
/// <param name="dimensionName">Name of the dimension.</param>
/// <returns>The index of the <paramref name="dimensionName"/> in dimension combination list, or -1 if not found.</returns>
public static int GetIndexInDimensionCombination(this IReadOnlyList<TimeSeriesDefinition<MetricIdentifier>> definitions, string dimensionName)
{
if (definitions == null || definitions.Count == 0)
{
throw new ArgumentException("definitions is null or empty.");
}
if (string.IsNullOrWhiteSpace(dimensionName))
{
throw new ArgumentException("dimensionName is null or empty.");
}
var definition = definitions[0];
if (definition.DimensionCombination == null)
{
return -1;
}
for (int i = 0; i < definition.DimensionCombination.Count; ++i)
{
if (dimensionName.Equals(definition.DimensionCombination[i].Key, StringComparison.OrdinalIgnoreCase))
{
return i;
}
}
return -1;
}
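// Example (hypothetical dimension names): if the first definition's DimensionCombination is
// [("Datacenter", ...), ("Role", ...)], GetIndexInDimensionCombination(definitions, "role")
// returns 1; the comparison is case-insensitive and unknown names return -1.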
}
}
<file_sep>using AzSignalR.Monitor.JobRegistry;
using k8s;
using K8SClient;
using McMaster.Extensions.CommandLineUtils;
using System;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Threading;
using System.Threading.Tasks;
namespace MetricsMonitor
{
[VersionOptionFromMember(MemberName = nameof(GetVersion))]
[HelpOption("--help")]
[Subcommand(
typeof(ScanPodMetricsOption))]
internal class CommandLineOptions : BaseOption
{
public string GetVersion()
=> typeof(CommandLineOptions).Assembly.GetCustomAttribute<AssemblyInformationalVersionAttribute>()?.InformationalVersion;
protected override Task OnExecuteAsync(CommandLineApplication app)
{
app.ShowHelp();
return Task.CompletedTask;
}
}
[Command(Name = "podmetrics", FullName = "podmetrics", Description = "Scan the metrics for all pods in a specific region")]
internal class ScanPodMetricsOption : BaseOption
{
[Option("-k|--k8sConfig", Description = "Specify the k8s configuration")]
public string K8sConfig { get; set; }
[Option("-r|--region", Description = "Specify the region where you want to get metrics from")]
public string Region { get; set; }
[Option("-c|--certPath", Description = "Specify the mdm client certificate path. Default is mdm.pfx")]
public string MdmClientCertPath { get; set; } = "mdm.pfx";
[Option("-p|--certPasswd", Description = "Specify the certificate password")]
public string MdmClientCertPasswd { get; set; }
[Option("-m|--metricsName", Description = "Specify the metrics name: <PodConnectionCount>|<MessageCount>, default is PodConnectionCount")]
public string MetricsName { get; set; } = MdmClient.PodConnectionCount;
private async Task<string[]> GetFreeTierDeployments(IKubernetes client, CancellationToken cancellationToken = default)
{
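// A deployment with exactly one replica is treated as a free-tier instance by this tool.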
var deployments = await K8sOperations.ListAllDeployments(client, "default", cancellationToken);
var freeDeployments = deployments.Where(d => d.Spec.Replicas == 1).Select(d => d.Metadata.Name).ToArray();
return freeDeployments;
}
private bool IsFreeTierPod(string[] freeTierList, string podName)
{
if (freeTierList != null && freeTierList.Length > 0)
{
foreach (var free in freeTierList)
{
if (podName.StartsWith(free))
{
return true;
}
}
}
return false;
}
protected override async Task OnExecuteAsync(CommandLineApplication app)
{
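// Overall flow: validate the command line, read the kubeconfig, list all pods and
// deployments in the "default" namespace, treat single-replica deployments as free tier,
// query MDM for the per-pod metric over the last hour, and report every pod that has no
// metric value.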
if (!ValidateParameters())
{
return;
}
var kubeClientCache = new KubernetesClientCache();
var kubeClient = kubeClientCache.Get("CheckPods", File.ReadAllText(this.K8sConfig));
var pods = await K8sOperations.ListPodsAsync(kubeClient, "default");
var freeTierDeployArray = await GetFreeTierDeployments(kubeClient);
var mdmClient = new MdmClient(MdmClientCertPath, MdmClientCertPasswd, Region);
var podMetrics = await mdmClient.GetDimensionCountMetricsAsync(MetricsName, TimeSpan.FromHours(1), "InstanceId");
if (podMetrics == null)
{
Console.WriteLine($"Fail to get metrics for {Region}");
return;
}
bool foundFreeTier = false;
Parallel.ForEach(pods, pod =>
{
bool isFreeTier = IsFreeTierPod(freeTierDeployArray, pod.Metadata.Name);
if (isFreeTier)
{
foundFreeTier = true;
}
var hasMetrics = podMetrics.TryGetValue(pod.Metadata.Name, out double count);
if (!hasMetrics)
{
if (isFreeTier)
{
Console.WriteLine($"Free pod {pod.Metadata.Name} miss metrics");
}
else
{
Console.WriteLine($"{pod.Metadata.Name} miss metrics");
}
}
});
if (!foundFreeTier)
{
Console.WriteLine("Does not see free tier");
}
}
private bool ValidateParameters()
{
if (String.IsNullOrEmpty(this.K8sConfig))
{
Console.WriteLine("Missing k8s configuration, please specify it by -k");
return false;
}
if (String.IsNullOrEmpty(this.Region))
{
Console.WriteLine("Missing region, please specify it by -r");
return false;
}
if (String.IsNullOrEmpty(this.MdmClientCertPath))
{
Console.WriteLine("Missing Mdm client certification, please specify it by -c");
return false;
}
if (String.IsNullOrEmpty(this.MdmClientCertPasswd))
{
Console.WriteLine("Missing Mdm client certification password, please specify it by -p");
return false;
}
return true;
}
}
[HelpOption("--help")]
internal abstract class BaseOption
{
protected virtual Task OnExecuteAsync(CommandLineApplication app)
{
return Task.CompletedTask;
}
protected static void ReportError(Exception ex)
{
Console.ForegroundColor = ConsoleColor.Red;
Console.Error.WriteLine($"Unexpected error: {ex}");
Console.ResetColor();
}
}
}
<file_sep>//-------------------------------------------------------------------------------------------------
// <copyright file="IPublicationConfiguration.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Configuration
{
/// <summary>
/// Determines which metric storage should be used for the data from this preaggregate.
/// </summary>
public interface IPublicationConfiguration
{
/// <summary>
/// Gets a value indicating whether cache server publication is disabled.
/// </summary>
bool CacheServerPublicationDisabled { get; }
/// <summary>
/// Gets a value indicating whether metric store publication is enabled.
/// </summary>
bool MetricStorePublicationEnabled { get; }
/// <summary>
/// Gets a value indicating whether the preaggregate should be published as an aggregated metrics store metric.
/// </summary>
bool AggregatedMetricsStorePublication { get; }
}
}<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="PropertyDefinition.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Query
{
using System;
using Metrics;
using Newtonsoft.Json;
/// <summary>
/// Aggregation types which can be used to create properties based on time series data
/// </summary>
public enum PropertyAggregationType
{
Average,
Sum,
Min,
Max
}
/// <summary>
/// Defines the calculation that will occur to get a query service property. The sampling type to act
/// on, and the aggregation to perform on that sampling type.
/// </summary>
public sealed class PropertyDefinition
{
/// <summary>
/// Initializes a new instance of the <see cref="PropertyDefinition"/> class.
/// </summary>
/// <param name="propertyAggregationType">Type of the property aggregation.</param>
/// <param name="samplingType">Name of the sampling type.</param>
[JsonConstructor]
public PropertyDefinition(PropertyAggregationType propertyAggregationType, SamplingType samplingType)
{
this.PropertyAggregationType = propertyAggregationType;
this.SamplingType = samplingType;
this.PropertyName = GetPropertyName(this.PropertyAggregationType, this.SamplingType.Name);
}
/// <summary>
/// Gets the type of the property aggregation.
/// </summary>
public PropertyAggregationType PropertyAggregationType { get; }
/// <summary>
/// Gets the name of the sampling type.
/// </summary>
public SamplingType SamplingType { get; }
/// <summary>
/// Gets the name of the property.
/// </summary>
public string PropertyName { get; }
/// <summary>
/// Gets the name of the resulted property.
/// </summary>
/// <param name="propertyAggregationType">Property aggregation type.</param>
/// <param name="samplingTypeName">Sampling type to which aggregation is applied.</param>
/// <returns>Name of the property.</returns>
public static string GetPropertyName(PropertyAggregationType propertyAggregationType, string samplingTypeName)
{
switch (propertyAggregationType)
{
case PropertyAggregationType.Average:
return $"TAVG({samplingTypeName})";
case PropertyAggregationType.Sum:
return $"TSUM({samplingTypeName})";
case PropertyAggregationType.Max:
return $"TMAX({samplingTypeName})";
case PropertyAggregationType.Min:
return $"TMIN({samplingTypeName})";
default:
throw new ArgumentException($"Unexpected propertyAggregationType: {propertyAggregationType}.", nameof(propertyAggregationType));
}
}
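// Examples of generated property names for a sampling type named "Sum":
// Average -> "TAVG(Sum)", Sum -> "TSUM(Sum)", Max -> "TMAX(Sum)", Min -> "TMIN(Sum)".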
}
}
<file_sep>// -----------------------------------------------------------------------
// <copyright file="LocalRawMetric.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// -----------------------------------------------------------------------
namespace Microsoft.Cloud.Metrics.Client.Metrics
{
using System;
using System.Collections.Generic;
using Etw;
using Logging;
/// <summary>
/// A class representing data for one raw metric.
/// </summary>
internal sealed class LocalRawMetric : ILocalRawMetric
{
/// <summary>
/// The platform metric etw operation code
/// </summary>
private const int PlatformMetricEtwOperationCode = 51;
/// <summary>
/// Custom log id to be used in the log statements.
/// </summary>
private static readonly object LogId = Logger.CreateCustomLogId("LocalRawMetric");
/// <summary>
/// Gets the Monitoring Account to which this metric is reported.
/// </summary>
public string MonitoringAccount { get; private set; }
/// <summary>
/// Gets the metric namespace.
/// </summary>
public string MetricNamespace { get; private set; }
/// <summary>
/// Gets the metric name.
/// </summary>
public string MetricName { get; private set; }
/// <summary>
/// Gets the metric dimensions.
/// </summary>
public IDictionary<string, string> Dimensions { get; private set; }
/// <summary>
/// Gets a value indicating whether the metric is a platform metric.
/// In such case its value should be taken using property <see cref="MetricDoubleValue"/>.
/// </summary>
public bool IsPlatformMetric { get; private set; }
/// <summary>
/// Gets the metric time bucket.
/// </summary>
public DateTime MetricTimeUtc { get; private set; }
/// <summary>
/// Gets the sample value of metric emitted using Metric API.
/// </summary>
public ulong MetricLongValue { get; private set; }
/// <summary>
/// Gets the sample value of the Platform specific metric.
/// </summary>
public double MetricDoubleValue { get; private set; }
/// <summary>
/// Converts content of the ETW event to <see cref="LocalRawMetric"/>.
/// </summary>
/// <param name="etwMetricData">Object containing information about metric data sample.</param>
/// <returns>A <see cref="LocalRawMetric"/> object representing metric sample data.</returns>
/// <exception cref="ArgumentException">Throw when information contained in metricDataRecord is in incorrect format.</exception>
internal static unsafe LocalRawMetric ConvertToMetricData(NativeMethods.EventRecord* etwMetricData)
{
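// Payload layout assumed by the parsing below:
// ushort dimension count,
// 6 bytes of alignment padding,
// long metric timestamp as FILETIME (0 means "use the ETW event header time"),
// long metric value (reinterpreted as a double for platform metrics, event id 51),
// then the monitoring account, metric namespace and metric name strings,
// followed by <count> dimension name strings and <count> dimension value strings.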
try
{
// Read ETW event time and use as metric time
var etwTimeUtc = DateTime.FromFileTimeUtc(etwMetricData->EventHeader.TimeStamp);
IntPtr pointerInPayload = etwMetricData->UserData;
// Get number of dimensions
ushort dimensionsCount = *((ushort*)pointerInPayload);
pointerInPayload = EtwPayloadManipulationUtils.Shift(pointerInPayload, sizeof(ushort));
// Shift 6 bytes as this space is reserved for alignment
pointerInPayload = EtwPayloadManipulationUtils.Shift(pointerInPayload, sizeof(ushort) + sizeof(uint));
// If time was reported with metric, use it
long timestamp = *((long*)pointerInPayload);
pointerInPayload = EtwPayloadManipulationUtils.Shift(pointerInPayload, sizeof(long));
var metricTimeUtc = timestamp == 0 ? etwTimeUtc : DateTime.FromFileTimeUtc(timestamp);
// Metric sample value which is either value or delta
double doubleMetricSampleValue = 0;
long metricSampleValue = 0;
if (etwMetricData->EventHeader.Id == PlatformMetricEtwOperationCode)
{
doubleMetricSampleValue = *((double*)pointerInPayload);
}
else
{
metricSampleValue = *((long*)pointerInPayload);
}
pointerInPayload = EtwPayloadManipulationUtils.Shift(pointerInPayload, sizeof(long));
// Read monitoring account, metric namespace and name
var monitoringAccount = EtwPayloadManipulationUtils.ReadString(ref pointerInPayload);
var metricNameSpace = EtwPayloadManipulationUtils.ReadString(ref pointerInPayload);
var metricName = EtwPayloadManipulationUtils.ReadString(ref pointerInPayload);
var dimensionNames = new List<string>();
for (int i = 0; i < dimensionsCount; ++i)
{
dimensionNames.Add(EtwPayloadManipulationUtils.ReadString(ref pointerInPayload));
}
var dimensionValues = new List<string>();
for (int i = 0; i < dimensionsCount; ++i)
{
dimensionValues.Add(EtwPayloadManipulationUtils.ReadString(ref pointerInPayload));
}
var dimensions = new Dictionary<string, string>();
for (int i = 0; i < dimensionsCount; ++i)
{
dimensions[dimensionNames[i]] = dimensionValues[i];
}
return new LocalRawMetric
{
IsPlatformMetric =
etwMetricData->EventHeader.Id == PlatformMetricEtwOperationCode,
MetricTimeUtc = metricTimeUtc,
MetricLongValue = (ulong)metricSampleValue,
MetricDoubleValue = doubleMetricSampleValue,
MonitoringAccount = monitoringAccount,
MetricNamespace = metricNameSpace,
MetricName = metricName,
Dimensions = dimensions
};
}
catch (Exception e)
{
Logger.Log(
LoggerLevel.Error,
LogId,
"ConvertToMetricData",
"Failed to read raw metric daat from the ETW event payload.",
e);
throw;
}
}
}
}<file_sep>// --------------------------------------------------------------------------------------------------------------------
// <copyright file="SamplingTypes.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Online.Metrics.Serialization
{
using System;
/// <summary>
/// Lists sampling types which metric may contain.
/// </summary>
[Flags]
public enum SamplingTypes
{
None = 0x0,
Min = 0x1,
Max = 0x2,
Sum = 0x4,
Count = 0x10,
Histogram = 0x20,
SumCount = Sum | Count,
SumCountMinMax = SumCount | Min | Max,
HyperLogLogSketch = 0x40,
DisableTrim = 0x80, // Not a sampling type per se but useful to send the information without changing serialization versions
ClientSideLastSampleOnly = 0x100,
DoubleValueType = 0x200, // Not a sampling type per se but useful to send the information without changing serialization versions
DoubleValueStoredAsLongType = 0x400, // Not a sampling type per se but useful to send the information without changing serialization versions
AggregatedTimeSeries = 0x800, // A flag indicating that samples belong to the aggregated time series (used in mStore aggregation pipeline)
SendToCacheServer = 0x1000, // A flag indicating that samples aggregated on the FrontEnd should be sent to CacheServer
SendToMStore = 0x2000, // A flag indicating that samples aggregated on the FrontEnd should be sent to mStore
ComplexTypes = Histogram | HyperLogLogSketch,
SumOfSquareDiffFromMean = 0x4000, // Used for calculating standard deviation; for more info see https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Welford's_Online_algorithm
TDigest = 0x8000,
}
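/// <summary>
/// Editor's illustrative sketch (not part of the original library): the Welford online update
/// referenced by the SumOfSquareDiffFromMean comment above. The class and member names here
/// are hypothetical.
/// </summary>
internal static class WelfordOnlineVarianceSketch
{
/// <summary>
/// Folds one sample into the running count, mean and sum of squared differences from the mean.
/// Variance can then be computed as sumOfSquareDiffFromMean / count (or / (count - 1) for the sample variance).
/// </summary>
public static void Update(ref long count, ref double mean, ref double sumOfSquareDiffFromMean, double sample)
{
count++;
double delta = sample - mean; // difference from the old mean
mean += delta / count; // new running mean
double delta2 = sample - mean; // difference from the new mean
sumOfSquareDiffFromMean += delta * delta2;
}
}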
}
| 88d225e547092a5069ef29c46204520d8991fb67 | [
"Markdown",
"C#"
] | 175 | C# | clovertrail/metrics-monitor | 8dc09779204a462492166c44c872d5759cf885e4 | d458ade29db17144f2edf9bcedb2b825fe944738 | |
refs/heads/master | <repo_name>yijunjun/moduse<file_sep>/README.md
# moduse
A minimal example of consuming another module (github.com/yijunjun/modtest) with Go modules (`go mod`).
<file_sep>/go.mod
module mod
require github.com/yijunjun/modtest v1.0.0
<file_sep>/main.go
package main
import (
"fmt"
"github.com/yijunjun/modtest"
)
func main() {
fmt.Println(modtest.Hi("yijunjun"))
} | 0aeb662808682e114f695997a7c0e0d77c043ff6 | [
"Markdown",
"Go Module",
"Go"
] | 3 | Markdown | yijunjun/moduse | f3ec0783ae830afc62a55472dc93825507dcf035 | d7592d7093fb8feb1ea09b01f3dcc08fdbc933bb | |
refs/heads/master | <repo_name>easter0731/Bioconductor<file_sep>/Bioconductor.R
# Function to check bioconductor packages
installpkg <- function(pkg){
if(!require(pkg,character.only=T)){
source("http://bioconductor.org/biocLite.R")
biocLite(pkg)
}
require(pkg,character.only=T)
}
installpkg("DiffBind")
source("http://bioconductor.org/biocLite.R")
biocLite("GenomicAlignments")
library("GenomicAlignments")
choosebank("genbank")
<file_sep>/DiffBind.R
### R code from vignette source 'DiffBind.Rnw'
###################################################
### code chunk number 1: style
###################################################
BiocStyle::latex()
savewd = getwd()
###################################################
### code chunk number 2: DiffBind.Rnw:95-99
###################################################
tmp = tempfile(as.character(Sys.getpid()))
pdf(tmp)
savewarn = options("warn")
options(warn=-1)
###################################################
### code chunk number 3: DiffBind.Rnw:103-105
###################################################
library(DiffBind)
setwd(system.file("extra", package="DiffBind"))
###################################################
### code chunk number 4: DiffBind.Rnw:110-116 (eval = FALSE)
###################################################
## print(savewd)
## tamoxifen = dba(sampleSheet="tamoxifen.csv")
## tamoxifen = dba.count(tamoxifen)
## tamoxifen = dba.contrast(tamoxifen, categories=DBA_CONDITION)
## tamoxifen = dba.analyze(tamoxifen)
## tamoxifen.DB = dba.report(tamoxifen)
###################################################
### code chunk number 5: sampSheet
###################################################
samples = read.csv(file.path(system.file("extra", package="DiffBind"),
"tamoxifen.csv"))
samples
###################################################
### code chunk number 6: dbaContruct
###################################################
tamoxifen = dba(sampleSheet="tamoxifen.csv")
###################################################
### code chunk number 7: DiffBind.Rnw:140-141
###################################################
tamoxifen
###################################################
### code chunk number 8: tamox_occ_corhm
###################################################
plot(tamoxifen)
###################################################
### code chunk number 9: DiffBind.Rnw:164-165 (eval = FALSE)
###################################################
## tamoxifen = dba.count(tamoxifen, minOverlap=3)
###################################################
### code chunk number 10: DiffBind.Rnw:168-169
###################################################
data(tamoxifen_counts)
###################################################
### code chunk number 11: tamox_aff_corhm
###################################################
plot(tamoxifen)
###################################################
### code chunk number 12: DiffBind.Rnw:183-184
###################################################
tamoxifen = dba.contrast(tamoxifen, categories=DBA_CONDITION)
###################################################
### code chunk number 13: tamox_sdb_corhm
###################################################
tamoxifen = dba.analyze(tamoxifen)
###################################################
### code chunk number 14: DiffBind.Rnw:205-206
###################################################
tamoxifen
###################################################
### code chunk number 15: DiffBind.Rnw:219-220
###################################################
tamoxifen.DB = dba.report(tamoxifen)
###################################################
### code chunk number 16: DiffBind.Rnw:225-226
###################################################
tamoxifen.DB
###################################################
### code chunk number 17: tamox_sdb_ma
###################################################
data(tamoxifen_analysis)
dba.plotMA(tamoxifen)
###################################################
### code chunk number 18: tamox_aff_pca
###################################################
dba.plotPCA(tamoxifen,DBA_TISSUE,label=DBA_CONDITION)
###################################################
### code chunk number 19: tamox_sdb_pca
###################################################
dba.plotPCA(tamoxifen, contrast=1,th=.05,label=DBA_TISSUE)
###################################################
### code chunk number 20: DiffBind.Rnw:293-295
###################################################
sum(tamoxifen.DB$Fold<0)
sum(tamoxifen.DB$Fold>0)
###################################################
### code chunk number 21: tamox_sdb_box
###################################################
pvals = dba.plotBox(tamoxifen)
###################################################
### code chunk number 22: DiffBind.Rnw:311-312
###################################################
pvals
###################################################
### code chunk number 23: DiffBind.Rnw:322-323
###################################################
corvals = dba.plotHeatmap(tamoxifen)
###################################################
### code chunk number 24: tamox_sdb_hm
###################################################
corvals = dba.plotHeatmap(tamoxifen, contrast=1, correlations=FALSE)
###################################################
### code chunk number 25: DiffBind.Rnw:346-349
###################################################
data(tamoxifen_counts)
tamoxifen = dba.contrast(tamoxifen,categories=DBA_CONDITION,
block=tamoxifen$masks$MCF7)
###################################################
### code chunk number 26: DiffBind.Rnw:354-356
###################################################
tamoxifen = dba.analyze(tamoxifen)
tamoxifen
###################################################
### code chunk number 27: tamox_block_ma
###################################################
dba.plotMA(tamoxifen,method=DBA_EDGER_BLOCK)
###################################################
### code chunk number 28: tamox_block_corhm
###################################################
dba.plotHeatmap(tamoxifen,contrast=1,method=DBA_EDGER_BLOCK,
attributes=c(DBA_TISSUE,DBA_CONDITION,DBA_REPLICATE))
###################################################
### code chunk number 29: tamox_block_pca
###################################################
dba.plotPCA(tamoxifen,contrast=1,method=DBA_EDGER_BLOCK,
attributes=DBA_CONDITION,label=DBA_TISSUE)
###################################################
### code chunk number 30: DiffBind.Rnw:397-399
###################################################
tamoxifen = dba.analyze(tamoxifen,method=DBA_ALL_METHODS)
tamoxifen
###################################################
### code chunk number 31: tamox_block_venn
###################################################
tam.block = dba.report(tamoxifen,method=DBA_ALL_BLOCK,bDB=TRUE,bAll=TRUE)
tam.block
dba.plotVenn(tam.block,1:3,label1="edgeR",label2="DESeq",label3="DESeq2")
###################################################
### code chunk number 32: DiffBind.Rnw:425-426
###################################################
data(tamoxifen_peaks)
###################################################
### code chunk number 33: DiffBind.Rnw:442-444
###################################################
olap.rate = dba.overlap(tamoxifen,mode=DBA_OLAP_RATE)
olap.rate
###################################################
### code chunk number 34: tamox_rate
###################################################
plot(olap.rate,type='b',ylab='# peaks', xlab='Overlap at least this many peaksets')
###################################################
### code chunk number 35: DiffBind.Rnw:465-466
###################################################
names(tamoxifen$masks)
###################################################
### code chunk number 36: DiffBind.Rnw:471-473
###################################################
dba.overlap(tamoxifen,tamoxifen$masks$MCF7 & tamoxifen$masks$Responsive,
mode=DBA_OLAP_RATE)
###################################################
### code chunk number 37: tamox_mcf7_venn
###################################################
dba.plotVenn(tamoxifen, tamoxifen$masks$MCF7 & tamoxifen$masks$Responsive)
###################################################
### code chunk number 38: DiffBind.Rnw:495-498
###################################################
tamoxifen = dba.peakset(tamoxifen, consensus = c(DBA_TISSUE,DBA_CONDITION),
minOverlap=0.66)
tamoxifen
###################################################
### code chunk number 39: DiffBind.Rnw:506-508
###################################################
tamoxifen_consensus = dba(tamoxifen, mask = tamoxifen$masks$Consensus)
tamoxifen_consensus
###################################################
### code chunk number 40: tamox_lines_venn
###################################################
data(tamoxifen_peaks)
tamoxifen = dba.peakset(tamoxifen, consensus = DBA_TISSUE, minOverlap=0.66)
dba.plotVenn(tamoxifen, tamoxifen$masks$Consensus)
###################################################
### code chunk number 41: DiffBind.Rnw:530-531
###################################################
data(tamoxifen_peaks)
###################################################
### code chunk number 42: DiffBind.Rnw:536-538
###################################################
dba.overlap(tamoxifen,tamoxifen$masks$Resistant,mode=DBA_OLAP_RATE)
dba.overlap(tamoxifen,tamoxifen$masks$Responsive,mode=DBA_OLAP_RATE)
###################################################
### code chunk number 43: tamox_cons_venn
###################################################
tamoxifen = dba.peakset(tamoxifen, consensus = DBA_CONDITION, minOverlap = 0.33)
dba.plotVenn(tamoxifen,tamoxifen$masks$Consensus)
###################################################
### code chunk number 44: DiffBind.Rnw:567-568
###################################################
tamoxifen.OL = dba.overlap(tamoxifen, tamoxifen$masks$Consensus)
###################################################
### code chunk number 45: DiffBind.Rnw:573-575
###################################################
tamoxifen.OL$onlyA
tamoxifen.OL$onlyB
###################################################
### code chunk number 46: tamox_compare_venn
###################################################
tamoxifen = dba.peakset(tamoxifen,tamoxifen$masks$Consensus,
minOverlap=1,sampID="OL Consensus")
tamoxifen = dba.peakset(tamoxifen,!tamoxifen$masks$Consensus,
minOverlap=3,sampID="Consensus_3")
dba.plotVenn(tamoxifen,14:15)
###################################################
### code chunk number 47: DiffBind.Rnw:602-603
###################################################
data(tamoxifen_analysis)
###################################################
### code chunk number 48: DiffBind.Rnw:608-609
###################################################
tamoxifen.rep = dba.report(tamoxifen,bCalled=T,th=1)
###################################################
### code chunk number 49: DiffBind.Rnw:614-620
###################################################
onlyResistant = tamoxifen.rep$Called1>=2 & tamoxifen.rep$Called2<3
sum(onlyResistant )
onlyResponsive = tamoxifen.rep$Called2>=3 & tamoxifen.rep$Called1<2
sum(onlyResponsive)
bothGroups = tamoxifen.rep$Called1>= 2 & tamoxifen.rep$Called2>=3
sum(bothGroups)
###################################################
### code chunk number 50: DiffBind.Rnw:632-639
###################################################
tamoxifen.DB = dba.report(tamoxifen,bCalled=T,th=.1)
onlyResistant.DB = tamoxifen.DB$Called1>=2 & tamoxifen.DB$Called2<3
sum(onlyResistant.DB)
onlyResponsive.DB = tamoxifen.DB$Called2>=3 & tamoxifen.DB$Called1<2
sum(onlyResponsive.DB)
bothGroups.DB = tamoxifen.DB$Called1>=2 & tamoxifen.DB$Called2>=3
sum(bothGroups.DB)
###################################################
### code chunk number 51: DiffBind.Rnw:780-781 (eval = FALSE)
###################################################
## file.path(system.file("extra", package="DiffBind"),"tamoxifen_GEO.csv")
###################################################
### code chunk number 52: sessionInfo
###################################################
toLatex(sessionInfo())
###################################################
### code chunk number 53: DiffBind.Rnw:811-812
###################################################
setwd(savewd)
<file_sep>/AKT_ChIP.r
## ChIP seq analysis of AKT project using DiffBind Package
# Function to check bioconductor packages
installpkg <- function(pkg){
if(!require(pkg,character.only=T)){
source("http://bioconductor.org/biocLite.R")
biocLite(pkg)
}
require(pkg,character.only=T)
}
# Setting R file folder as a working directory
setwd("F:/Chip - and RNA-seq/AKT Chip-Seq/#Bin Anaylsis")
# Backup information
tmp = tempfile(as.character(Sys.getpid()))
pdf(tmp)
savewarn = options("warn")
options(warn=-1)
# Loading Package
installpkg("DiffBind")
# Get sample sheet
samples = read.csv("AKT_ChIP_samplesheet.csv")
samples
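# --- Editor's sketch of the remaining DiffBind steps (assumptions, not yet run) ---
# The rest of the workflow would mirror the vignette code in DiffBind.R above:
# count reads, set up a contrast, run the analysis and report differential sites.
# The contrast category (DBA_CONDITION) is an assumption that depends on how
# AKT_ChIP_samplesheet.csv is filled in.
# akt = dba(sampleSheet="AKT_ChIP_samplesheet.csv")
# akt = dba.count(akt)
# akt = dba.contrast(akt, categories=DBA_CONDITION)
# akt = dba.analyze(akt)
# akt.DB = dba.report(akt)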
| 7ceb14f5e792e0947ff522520fbc84d90aac5f8a | [
"R"
] | 3 | R | easter0731/Bioconductor | b9f0085b2ba5d257d47cec4be436a1350acc7c56 | 7781c3aca2acf47b4fd4cbf903521b544c59769b | |
refs/heads/master | <file_sep>package com.example.tinylove.Database;
import android.content.Context;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
public class MyDataBaseHelper extends SQLiteOpenHelper{
public MyDataBaseHelper(Context context, String name, int version) {
super(context, name, null, version);
// TODO Auto-generated constructor stub
System.out.println("Create Database!");
}
@Override
public void onCreate(SQLiteDatabase db) {
// TODO Auto-generated method stub
db.execSQL(TinyUser.CREATE_TABLE_TINYUSER);
db.execSQL(TinyAnni.CREATE_TABLE_TINYANNI);
db.execSQL(TinyWish.CREATE_TABLE_TINYWISH);
db.execSQL(TinyCheck.CREATE_TABLE_TINYCHECK);
db.execSQL(TinyTimePicture.CREATE_TABLE_TINYTIME);
System.out.println("Create Table!");
}
@Override
public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
// TODO Auto-generated method stub
System.out.println("onUpdate Called! "+oldVersion+"->"+newVersion);
}
}
<file_sep>package com.example.tinylove.Activity;
import java.util.Date;
import com.example.tinylove.R;
import com.example.tinylove.Database.MyDataBaseHelper;
import com.example.tinylove.Database.TinyAnni;
import com.example.tinylove.Database.TinyUser;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.SharedPreferences.Editor;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.util.Log;
public class SplashActivity extends Activity {
public static MyDataBaseHelper dbHelper;
// ÑÓ³Ù3Ãë
private static final long SPLASH_DELAY_MILLIS = 1000;
private Handler mHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
switch (msg.what) {
case 1000:
Intent intent1 = new Intent(SplashActivity.this, MainActivity.class);
SplashActivity.this.startActivity(intent1);
SplashActivity.this.finish();
break;
case 1001:
Intent intent2 = new Intent(SplashActivity.this, GuideActivity.class);
SplashActivity.this.startActivity(intent2);
SplashActivity.this.finish();
break;
}
super.handleMessage(msg);
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_splash);
SharedPreferences sharedPreferences = this.getSharedPreferences("share", MODE_PRIVATE);
boolean isFirstRun = sharedPreferences.getBoolean("isFirstRun", true);
Editor editor = sharedPreferences.edit();
if (isFirstRun)
{
editor.putBoolean("isFirstRun", false);
editor.commit();
System.out.println("isFirstRun");
} else
{
System.out.println("notFirstRun");
}
dbHelper=new MyDataBaseHelper(this,"tintloveDB1.db3", 1);
//Date test
Date date=new Date();
System.out.println("nochange"+date.getYear()+" "+date.getMonth()+" "+date.getDate()+" "+date.getDay());
System.out.println("change"+(date.getYear()+1900)+" "+(date.getMonth()+1)+" "+date.getDate()+" "+date.getDay());
date.setYear(115);
date.setMonth(10);
date.setDate(1);
System.out.println("set_nochange"+date.getYear()+" "+date.getMonth()+" "+date.getDate()+" "+date.getDay());
System.out.println("set_change"+(date.getYear()+1900)+" "+(date.getMonth()+1)+" "+date.getDate()+" "+date.getDay());
// TinyUser user=new TinyUser();
// user.display();
// TinyAnni anni=new TinyAnni();
// anni.display();
mHandler.sendEmptyMessageDelayed(1001, SPLASH_DELAY_MILLIS);
}
}
<file_sep>package com.example.tinylove.Activity;
import com.example.tinylove.R;
import android.app.Activity;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.LinearLayout;
public class SettingFragment extends Fragment implements OnClickListener {
private LinearLayout llUser;
private LinearLayout llAboutUs;
private LinearLayout llGitHub;
private LinearLayout llLogout;
private LinearLayout llExit;
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
View rootView = inflater.inflate(R.layout.fragment_setting, container, false);
return rootView;
}
@Override
public void onViewCreated(View view, Bundle savedInstanceState) {
// TODO Auto-generated method stub
super.onViewCreated(view, savedInstanceState);
llUser=(LinearLayout)view.findViewById(R.id.setting_edit_user);
llAboutUs=(LinearLayout)view.findViewById(R.id.setting_about_us);
llGitHub=(LinearLayout)view.findViewById(R.id.setting_github);
llLogout=(LinearLayout)view.findViewById(R.id.setting_logout);
llExit=(LinearLayout)view.findViewById(R.id.setting_exit);
llUser.setOnClickListener(this);
llAboutUs.setOnClickListener(this);
llGitHub.setOnClickListener(this);
llLogout.setOnClickListener(this);
llExit.setOnClickListener(this);
}
@Override
public void onClick(View v) {
// TODO Auto-generated method stub
if(v.getId()==R.id.setting_edit_user){
Intent iu=new Intent(getActivity(), UserEditActivity.class);
startActivity(iu);
}
else if(v.getId()==R.id.setting_about_us){
AboutUsDialog.content=" 这是一款针对情侣设计的应用软件,致力于让情侣的生活更加美好。\n"
+ " 【情侣纪念日】:记录情侣之间每一个值得纪念的日子。\n"
+ " 【情侣愿望】:足下愿望,两个人一起去实现它。\n"
+ " 【联系TA】:想TA?还等什么快去给TA打电话吧。\n"
+ " 【情侣打卡】:共同完成甜蜜的小日常。\n"
+ " 【微爱时光】:用照片记录下爱的时光。\n";
Intent ip=new Intent(getActivity(), AboutUsDialog.class);
startActivity(ip);
}
else if(v.getId()==R.id.setting_github){
Intent ib=new Intent(Intent.ACTION_VIEW, Uri.parse("https://github.com/jid1311644/TinyLove"));
startActivity(ib);
}
else if(v.getId()==R.id.setting_logout){
ConfirmDialog.content="确认注销?";
Intent ic=new Intent(getActivity(), ConfirmDialog.class);
startActivityForResult(ic, 11);
}
else if(v.getId()==R.id.setting_exit){
ConfirmDialog.content="确认退出?";
Intent ic0=new Intent(getActivity(), ConfirmDialog.class);
startActivityForResult(ic0, 12);
}
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
// TODO Auto-generated method stub
super.onActivityResult(requestCode, resultCode, data);
if(requestCode==11&&resultCode==0){
if(ConfirmDialog.OK){
Intent ig=new Intent(getActivity(), GuideActivity.class);
startActivity(ig);
getActivity().finish();
}
}
if(requestCode==12&&resultCode==0){
if(ConfirmDialog.OK){
System.exit(0);
}
}
}
}
<file_sep>package com.example.tinylove.Database;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.LinkedList;
import com.example.tinylove.Activity.AnniActivity;
import com.example.tinylove.Activity.MainActivity;
import com.example.tinylove.Activity.SplashActivity;
import android.content.ContentValues;
import android.database.Cursor;
public class TinyAnni {
public final static String CREATE_TABLE_TINYANNI=
"CREATE TABLE `tiny_anni` ("
+ "`user_name` varchar(50) NOT NULL,"
+ "`anni_id` varchar(10) NOT NULL,"
+ "`anni_content` varchar(100) NOT NULL,"
+ "`anni_year` varchar(4) DEFAULT NULL,"
+ "`anni_month` varchar(2) DEFAULT NULL,"
+ "`anni_day` varchar(2) DEFAULT NULL,"
+ "`anni_time_type` int(1) NOT NULL,"
+ "`anni_color` varchar(2) NOT NULL,"
+ "`anni_frequent` varchar(4) NOT NULL,"
+ "`anni_background` varchar(2) DEFAULT NULL,"
+ "PRIMARY KEY (`user_name`,`anni_id`),"
+ "FOREIGN KEY (`user_name`) REFERENCES `tiny_user` (`user_name`)"
+ ")";
public String userName;
public String anniID;
public String anniContent;
public String anniYear;
public String anniMonth;
public String anniDay;
public int anniTimeType; // 0 = Gregorian (solar) calendar, 1 = lunar calendar
public String anniColor;
public String anniFrequent;
public String anniBackground;
public void init(String name){
ContentValues values=new ContentValues();
values.put("user_name", name);
values.put("anni_id", "0001");
values.put("anni_content", "我们在一起");
values.put("anni_year", "");
values.put("anni_month", "");
values.put("anni_day", "");
values.put("anni_time_type", 0);
values.put("anni_color", AnniActivity.ANNI_BACK_TOGETHER);
values.put("anni_frequent", "无");
values.put("anni_background", "");
SplashActivity.dbHelper.getReadableDatabase().insert("tiny_anni", null, values);
values=new ContentValues();
values.put("user_name", name);
values.put("anni_id", "0002");
values.put("anni_content", "TA的生日");
values.put("anni_year", "");
values.put("anni_month", "");
values.put("anni_day", "");
values.put("anni_time_type", 0);
values.put("anni_color", AnniActivity.ANNI_BACK_BIRTHDAY);
values.put("anni_frequent", "每年");
values.put("anni_background", "");
SplashActivity.dbHelper.getReadableDatabase().insert("tiny_anni", null, values);
values=new ContentValues();
values.put("user_name", name);
values.put("anni_id", "0003");
values.put("anni_content", "我的生日");
values.put("anni_year", "");
values.put("anni_month", "");
values.put("anni_day", "");
values.put("anni_time_type", 0);
values.put("anni_color", AnniActivity.ANNI_BACK_BIRTHDAY);
values.put("anni_frequent", "每年");
values.put("anni_background", "");
SplashActivity.dbHelper.getReadableDatabase().insert("tiny_anni", null, values);
values=new ContentValues();
values.put("user_name", name);
values.put("anni_id", "0004");
values.put("anni_content", "第一次拥抱");
values.put("anni_year", "");
values.put("anni_month", "");
values.put("anni_day", "");
values.put("anni_time_type", 0);
values.put("anni_color", AnniActivity.ANNI_BACK_HUG);
values.put("anni_frequent", "无");
values.put("anni_background", "");
SplashActivity.dbHelper.getReadableDatabase().insert("tiny_anni", null, values);
values=new ContentValues();
values.put("user_name", name);
values.put("anni_id", "0005");
values.put("anni_content", "第一次接吻");
values.put("anni_year", "");
values.put("anni_month", "");
values.put("anni_day", "");
values.put("anni_time_type", 0);
values.put("anni_color", AnniActivity.ANNI_BACK_TOGETHER);
values.put("anni_frequent", "无");
values.put("anni_background", "");
SplashActivity.dbHelper.getReadableDatabase().insert("tiny_anni", null, values);
values=new ContentValues();
values.put("user_name", name);
values.put("anni_id", "0006");
values.put("anni_content", "第一次一起去旅行");
values.put("anni_year", "");
values.put("anni_month", "");
values.put("anni_day", "");
values.put("anni_time_type", 0);
values.put("anni_color", AnniActivity.ANNI_BACK_TRAVEL);
values.put("anni_frequent", "无");
values.put("anni_background", "");
SplashActivity.dbHelper.getReadableDatabase().insert("tiny_anni", null, values);
values=new ContentValues();
values.put("user_name", name);
values.put("anni_id", "0007");
values.put("anni_content", "我们的结婚纪念日");
values.put("anni_year", "");
values.put("anni_month", "");
values.put("anni_day", "");
values.put("anni_time_type", 0);
values.put("anni_color", AnniActivity.ANNI_BACK_MARRY);
values.put("anni_frequent", "每年");
values.put("anni_background", "");
SplashActivity.dbHelper.getReadableDatabase().insert("tiny_anni", null, values);
}
LinkedList<String> ids=new LinkedList<String>();
LinkedList<String> events=new LinkedList<String>();
LinkedList<String> days=new LinkedList<String>();
LinkedList<String> y=new LinkedList<String>();
LinkedList<String> m=new LinkedList<String>();
LinkedList<String> d=new LinkedList<String>();
LinkedList<Integer> back=new LinkedList<Integer>();
public void getData(String name){
String sql="select * from tiny_anni where user_name='"+name+"'";
Cursor cursor=SplashActivity.dbHelper.getReadableDatabase().rawQuery(sql, null);
while(cursor.moveToNext()){
ids.add(cursor.getString(1));
events.add(cursor.getString(2));
String y0=cursor.getString(3);
String m0=cursor.getString(4);
String d0=cursor.getString(5);
y.add(y0);
m.add(m0);
d.add(d0);
back.add(Integer.valueOf(cursor.getString(7)));
if(!y0.equals("")&&!m0.equals("")&&!d0.equals("")){
if(m0.length()==1)m0="0"+m0;
if(d0.length()==1)d0="0"+d0;
String date=y0+"-"+m0+"-"+d0;
String fre=cursor.getString(8);
days.add(getDays(date, fre)+"");
}
else{
days.add("9999");
}
}
}
public String getFrequent(String name,String id){
String sql="select anni_frequent from tiny_anni "
+ " where user_name='"+name+"' "
+ " and anni_id='"+id+"' ";
Cursor cursor=SplashActivity.dbHelper.getReadableDatabase().rawQuery(sql, null);
String f="";
while(cursor.moveToNext()){
f=cursor.getString(0);
}
return f;
}
public String getBackgroundImage(String name,String id){
String sql="select anni_background from tiny_anni "
+ " where user_name='"+name+"' "
+ " and anni_id='"+id+"' ";
Cursor cursor=SplashActivity.dbHelper.getReadableDatabase().rawQuery(sql, null);
String f="";
while(cursor.moveToNext()){
f=cursor.getString(0);
}
return f;
}
public LinkedList<String> getIds() {
return ids;
}
public void setIds(LinkedList<String> ids) {
this.ids = ids;
}
public LinkedList<String> getEvents() {
return events;
}
public void setEvents(LinkedList<String> events) {
this.events = events;
}
public LinkedList<String> getDays() {
return days;
}
public void setDays(LinkedList<String> days) {
this.days = days;
}
public LinkedList<String> getY() {
return y;
}
public void setY(LinkedList<String> y) {
this.y = y;
}
public LinkedList<String> getM() {
return m;
}
public void setM(LinkedList<String> m) {
this.m = m;
}
public LinkedList<String> getD() {
return d;
}
public void setD(LinkedList<String> d) {
this.d = d;
}
public LinkedList<Integer> getBack() {
return back;
}
public void setBack(LinkedList<Integer> back) {
this.back = back;
}
public int getLastID(){
String sql="select anni_id from tiny_anni";
Cursor cursor=SplashActivity.dbHelper.getReadableDatabase().rawQuery(sql, null);
String id="";
while(cursor.moveToNext()){
id=cursor.getString(0);
}
return Integer.valueOf(id);
}
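// Computes how many days separate today from the anniversary date, honoring the stored
// recurrence value (kept in Chinese in the database): "无" = one-off (absolute day distance),
// "每周" = weekly, "每月" = monthly, "每年" = yearly (days until the next occurrence).
// Returns -1 if the recurrence value is not recognized or the dates cannot be parsed.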
public int getDays(String date,String frequent){
SimpleDateFormat sdf=new SimpleDateFormat("yyyy-MM-dd");
try {
Date lastday=sdf.parse(date);
Date today=sdf.parse(sdf.format(new Date()));
if(frequent.equals("无")){
int d=daysBetween(lastday, today);
if(d<0)d=-d;
return d;
}
else if(frequent.equals("每周")){
int weekOfD1=dayForWeek(lastday);
int weekOfD2=dayForWeek(today);
if(weekOfD2>weekOfD1){
return weekOfD1-weekOfD2+7;
}
else{
return weekOfD1-weekOfD2;
}
}
else if(frequent.equals("每月")){
int dL=lastday.getDate();
int dT=today.getDate();
if(dT<=dL){
return dL-dT;
}
else{
lastday.setMonth(today.getMonth()+1);
return daysBetween(today, lastday);
}
}
else if(frequent.equals("每年")){
lastday.setYear(today.getYear());
int d=daysBetween(today, lastday);
if(d>0){
return d;
}
else{
lastday.setYear(today.getYear()+1);
return daysBetween(today, lastday);
}
}
} catch (ParseException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return -1;
}
public int getDays(String name){
String sql=" select anni_year,anni_month,anni_day from tiny_anni "
+ " where user_name='"+name+"' "
+ " and anni_id='0001' ";
Cursor cursor=SplashActivity.dbHelper.getReadableDatabase().rawQuery(sql, null);
String date="";
while(cursor.moveToNext()){
if(!cursor.getString(0).equals("")&&!cursor.getString(1).equals("")&&!cursor.getString(2).equals("")){
date=cursor.getString(0)+"-"+cursor.getString(1)+"-"+cursor.getString(2);
}
}
if(!date.equals("")){
return getDays(date,"无");
}
return 0;
}
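// Returns the date of the next occurrence (honoring the recurrence setting) formatted as
// "yyyy-MM-dd" plus the Chinese weekday name taken from the weeks[] array.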
public String getTargetDate(String date,String frequent){
String[] weeks={"星期日","星期一","星期二","星期三","星期四","星期五","星期六"};
SimpleDateFormat sdf=new SimpleDateFormat("yyyy-MM-dd");
try {
Date lastday=sdf.parse(date);
Date today=sdf.parse(sdf.format(new Date()));
if(frequent.equals("无")){
return date+" "+weeks[dayForWeek(lastday)];
}
else if(frequent.equals("每周")){
int weekOfD1=dayForWeek(lastday);
int weekOfD2=dayForWeek(today);
if(weekOfD2>weekOfD1){
today.setDate(today.getDate()+weekOfD1-weekOfD2+7);
return sdf.format(today)+" "+weeks[dayForWeek(lastday)];
}
else{
today.setDate(today.getDate()+weekOfD1-weekOfD2);
return sdf.format(today)+" "+weeks[dayForWeek(lastday)];
}
}
else if(frequent.equals("每月")){
int dL=lastday.getDate();
int dT=today.getDate();
if(dT<=dL){
lastday.setMonth(today.getMonth());
return sdf.format(lastday)+" "+weeks[dayForWeek(lastday)];
}
else{
lastday.setMonth(today.getMonth()+1);
return sdf.format(lastday)+" "+weeks[dayForWeek(lastday)];
}
}
else if(frequent.equals("每年")){
lastday.setYear(today.getYear());
int d=daysBetween(today, lastday);
if(d>0){
return sdf.format(lastday)+" "+weeks[dayForWeek(lastday)];
}
else{
lastday.setYear(today.getYear()+1);
return sdf.format(lastday)+" "+weeks[dayForWeek(lastday)];
}
}
} catch (ParseException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return null;
}
public String getAnniContent(String id){
String c="";
String sql="select anni_content from tiny_anni where user_name='"+MainActivity.currentUser+"' and anni_id='"+id+"'";
Cursor cursor=SplashActivity.dbHelper.getReadableDatabase().rawQuery(sql, null);
while(cursor.moveToNext()){
c=cursor.getString(0);
}
return c;
}
public String getAnniDate(String id){
String c="";
String sql="select anni_year,anni_month,anni_day from tiny_anni where user_name='"+MainActivity.currentUser+"' and anni_id='"+id+"'";
Cursor cursor=SplashActivity.dbHelper.getReadableDatabase().rawQuery(sql, null);
while(cursor.moveToNext()){
c=cursor.getString(0)+"-"+cursor.getString(1)+"-"+cursor.getString(2);
}
return c;
}
public int getBackground(String id){
int i=0;
String sql="select anni_background from tiny_anni where user_name='"+MainActivity.currentUser+"' and anni_id='"+id+"'";
Cursor cursor=SplashActivity.dbHelper.getReadableDatabase().rawQuery(sql, null);
while(cursor.moveToNext()){
i=Integer.valueOf(cursor.getString(0));
}
return i;
}
public boolean setBackground(int b,String id){
String sql="update tiny_anni set anni_background='"+b+"' where user_name='"+MainActivity.currentUser+"' and anni_id='"+id+"'";
SplashActivity.dbHelper.getWritableDatabase().execSQL(sql);
return true;
}
public int daysBetween(Date smdate,Date bdate){
SimpleDateFormat sdf=new SimpleDateFormat("yyyy-MM-dd");
try {
smdate=sdf.parse(sdf.format(smdate));
bdate=sdf.parse(sdf.format(bdate));
} catch (ParseException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
Calendar cal = Calendar.getInstance();
cal.setTime(smdate);
long time1 = cal.getTimeInMillis();
cal.setTime(bdate);
long time2 = cal.getTimeInMillis();
long between_days=(time2-time1)/(1000*3600*24);
return (int) between_days;
}
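//Day of week as an index 0-6, where 0 is Sunday (星期日).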
public int dayForWeek(Date date){
int[] weekOfDays = {0, 1, 2, 3, 4, 5, 6};
Calendar calendar = Calendar.getInstance();
if(date != null){
calendar.setTime(date);
}
int w = calendar.get(Calendar.DAY_OF_WEEK) - 1;
if (w < 0){
w = 0;
}
return weekOfDays[w];
}
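//Fills in the date, color and background of a built-in (system) anniversary row that already exists for the current user.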
public boolean addSystemAnni(){
String[] sql=new String[5];
sql[0]="update tiny_anni set anni_year='"+anniYear+"' where user_name='"+MainActivity.currentUser+"' and anni_id='"+anniID+"'";
sql[1]="update tiny_anni set anni_month='"+anniMonth+"' where user_name='"+MainActivity.currentUser+"' and anni_id='"+anniID+"'";
sql[2]="update tiny_anni set anni_day='"+anniDay+"' where user_name='"+MainActivity.currentUser+"' and anni_id='"+anniID+"'";
sql[3]="update tiny_anni set anni_color='"+anniColor+"' where user_name='"+MainActivity.currentUser+"' and anni_id='"+anniID+"'";
sql[4]="update tiny_anni set anni_background='"+anniBackground+"' where user_name='"+MainActivity.currentUser+"' and anni_id='"+anniID+"'";
for(int i=0;i<5;i++){
SplashActivity.dbHelper.getWritableDatabase().execSQL(sql[i]);
}
return true;
}
public boolean updateSystemAnni(){
String[] sql=new String[4];
sql[0]="update tiny_anni set anni_year='"+anniYear+"' where user_name='"+MainActivity.currentUser+"' and anni_id='"+anniID+"'";
sql[1]="update tiny_anni set anni_month='"+anniMonth+"' where user_name='"+MainActivity.currentUser+"' and anni_id='"+anniID+"'";
sql[2]="update tiny_anni set anni_day='"+anniDay+"' where user_name='"+MainActivity.currentUser+"' and anni_id='"+anniID+"'";
sql[3]="update tiny_anni set anni_color='"+anniColor+"' where user_name='"+MainActivity.currentUser+"' and anni_id='"+anniID+"'";
for(int i=0;i<4;i++){
SplashActivity.dbHelper.getWritableDatabase().execSQL(sql[i]);
}
return true;
}
public boolean addAnni(){
ContentValues values=new ContentValues();
values.put("user_name", userName);
values.put("anni_id", anniID);
values.put("anni_content", anniContent);
values.put("anni_year", anniYear);
values.put("anni_month", anniMonth);
values.put("anni_day", anniDay);
values.put("anni_time_type", 0);
values.put("anni_color", anniColor);
values.put("anni_frequent", anniFrequent);
values.put("anni_background", anniBackground);
SplashActivity.dbHelper.getWritableDatabase().insert("tiny_anni", null, values);
return true;
}
public boolean updateAnni(){
String[] sql=new String[6];
sql[0]="update tiny_anni set anni_year='"+anniYear+"' where user_name='"+MainActivity.currentUser+"' and anni_id='"+anniID+"'";
sql[1]="update tiny_anni set anni_month='"+anniMonth+"' where user_name='"+MainActivity.currentUser+"' and anni_id='"+anniID+"'";
sql[2]="update tiny_anni set anni_day='"+anniDay+"' where user_name='"+MainActivity.currentUser+"' and anni_id='"+anniID+"'";
sql[3]="update tiny_anni set anni_color='"+anniColor+"' where user_name='"+MainActivity.currentUser+"' and anni_id='"+anniID+"'";
sql[4]="update tiny_anni set anni_content='"+anniContent+"' where user_name='"+MainActivity.currentUser+"' and anni_id='"+anniID+"'";
sql[5]="update tiny_anni set anni_frequent='"+anniFrequent+"' where user_name='"+MainActivity.currentUser+"' and anni_id='"+anniID+"'";
for(int i=0;i<6;i++){
SplashActivity.dbHelper.getWritableDatabase().execSQL(sql[i]);
}
return true;
}
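//"Deleting" a system anniversary (id 1-6) only clears its date and background and switches its color index to the icon entry (id+6); other ids return false.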
public boolean deleteSystemAnni(String id){
String[] sql=new String[5];
int a;
if((a=Integer.valueOf(id))<=6){
sql[0]="update tiny_anni set anni_year='' where user_name='"+MainActivity.currentUser+"' and anni_id='"+id+"'";
sql[1]="update tiny_anni set anni_month='' where user_name='"+MainActivity.currentUser+"' and anni_id='"+id+"'";
sql[2]="update tiny_anni set anni_day='' where user_name='"+MainActivity.currentUser+"' and anni_id='"+id+"'";
sql[3]="update tiny_anni set anni_color='"+(a+6)+"' where user_name='"+MainActivity.currentUser+"' and anni_id='"+id+"'";
sql[4]="update tiny_anni set anni_background='' where user_name='"+MainActivity.currentUser+"' and anni_id='"+id+"'";
for(int i=0;i<5;i++){
SplashActivity.dbHelper.getWritableDatabase().execSQL(sql[i]);
}
return true;
}
else{
return false;
}
}
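//User-created anniversaries (id greater than 6) are removed from the table entirely.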
public boolean deleteAnni(String id){
int a;
if((a=Integer.valueOf(id))>6){
String sql="delete from tiny_anni where user_name='"+MainActivity.currentUser+"' and anni_id='"+id+"'";
SplashActivity.dbHelper.getWritableDatabase().execSQL(sql);
return true;
}
else{
return false;
}
}
public void display(){
String sql="select * from tiny_anni";
Cursor cursor=SplashActivity.dbHelper.getReadableDatabase().rawQuery(sql, null);
int count=0;
while(cursor.moveToNext()){
System.out.println((count++)+" ;"+cursor.getString(0)+" ;"+cursor.getString(1)+" ;"+cursor.getString(2)+" ;"
+cursor.getString(3)+" ;"+cursor.getInt(4)+" ;"+cursor.getString(5)+" ;"+cursor.getInt(6)
+" ;"+cursor.getString(7)+" ;"+cursor.getString(8)+" ;"+cursor.getString(9));
}
}
}
<file_sep>package com.example.tinylove.Interface;
public interface ItemClickListener {
public void onItemClickListener(int position);
}
<file_sep>package com.example.tinylove.Activity;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.LinkedList;
import com.example.tinylove.R;
import com.example.tinylove.Database.TinyTimePicture;
import com.example.tinylove.View.OutlineContainer;
import com.example.tinylove.View.PictureView;
import com.example.tinylove.View.PictureView.TransitionEffect;
import android.app.Activity;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.view.PagerAdapter;
import android.support.v4.view.ViewPager.OnPageChangeListener;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup.LayoutParams;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
public class TimeFragment extends Fragment implements OnClickListener {
private PictureView pictureView;
private TextView mode;
private ImageView add;
private ImageView delete;
public static TransitionEffect effect=TransitionEffect.Tablet;
private View v;
public static String PATH="";
private int position=0;
private LinkedList<String> pictures=new LinkedList<String>();
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
View rootView = inflater.inflate(R.layout.fragment_time, container, false);
return rootView;
}
@Override
public void onViewCreated(View view, Bundle savedInstanceState) {
// TODO Auto-generated method stub
super.onViewCreated(view, savedInstanceState);
this.v=view;
mode=(TextView)view.findViewById(R.id.time_picture_mode);
add=(ImageView)view.findViewById(R.id.time_picture_add);
delete=(ImageView)view.findViewById(R.id.time_picture_delete);
setPlayMode(view, effect);
mode.setOnClickListener(this);
add.setOnClickListener(this);
delete.setOnClickListener(this);
}
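//Reloads the stored picture paths and rebuilds the pager with the given transition effect.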
private void setPlayMode(View v,TransitionEffect effect){
TinyTimePicture time=new TinyTimePicture();
time.getData(MainActivity.currentUser);
pictures=time.getPictures();
// for(int i=0;i<pictures.size();i++)
// System.out.println(i+" "+pictures.get(i));
// System.out.println(effect);
pictureView = (PictureView)v.findViewById(R.id.time_picture);
pictureView.setTransitionEffect(effect);
pictureView.setAdapter(new MyPictureAdapter(pictures));
pictureView.setPageMargin(30);
pictureView.setOnPageChangeListener(new OnPageChangeListener() {
@Override
public void onPageSelected(int arg0) {
// TODO Auto-generated method stub
position=arg0;
}
@Override
public void onPageScrolled(int arg0, float arg1, int arg2) {
// TODO Auto-generated method stub
}
@Override
public void onPageScrollStateChanged(int arg0) {
// TODO Auto-generated method stub
}
});
}
private class MyPictureAdapter extends PagerAdapter{
LinkedList<String> pictures;
public MyPictureAdapter(LinkedList<String> pictures) {
// TODO Auto-generated constructor stub
this.pictures=pictures;
}
@Override
public int getCount() {
// TODO Auto-generated method stub
return pictures.size();
}
@Override
public boolean isViewFromObject(View view, Object arg1) {
// TODO Auto-generated method stub
if (view instanceof OutlineContainer)
{
return ((OutlineContainer) view).getChildAt(0) == arg1;
} else
{
return view == arg1;
}
}
@Override
public void destroyItem(ViewGroup container, int position, Object object) {
// TODO Auto-generated method stub
container.removeView(pictureView.findViewFromObject(position));
}
@Override
public Object instantiateItem(ViewGroup container, int position) {
// TODO Auto-generated method stub
ImageView image=new ImageView(getActivity());
Bitmap bitmap=getLoacalBitmap(pictures.get(position));
image.setImageBitmap(bitmap);
container.addView(image, LayoutParams.MATCH_PARENT,
LayoutParams.MATCH_PARENT);
pictureView.setObjectForPosition(image, position);
return image;
}
}
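//Decodes a bitmap from a local file path; returns null when the file cannot be opened.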
public static Bitmap getLoacalBitmap(String url) {
try {
FileInputStream fis = new FileInputStream(url);
return BitmapFactory.decodeStream(fis); // decode the file stream into a Bitmap
} catch (FileNotFoundException e) {
e.printStackTrace();
return null;
}
}
@Override
public void onClick(View v) {
// TODO Auto-generated method stub
switch (v.getId()) {
case R.id.time_picture_add:
GetPictureDialog.isHomePage=false;
View vt=new View(getActivity());
GetPictureDialog.imageView=vt;
SimpleDateFormat sdf=new SimpleDateFormat("yyyy_MM_dd_HH_mm_ss");
String name=sdf.format(new Date(System.currentTimeMillis()));
name="image"+name+".png";
GetPictureDialog.fileName=name;
Intent i=new Intent(getActivity(), GetPictureDialog.class);
startActivityForResult(i,8);
break;
case R.id.time_picture_delete:
if(pictures.size()>0){
ConfirmDialog.content="确认删除这张图片吗?";
Intent ic=new Intent(getActivity(), ConfirmDialog.class);
startActivityForResult(ic,10);
}
break;
case R.id.time_picture_mode:
Intent intent=new Intent(getActivity(), TimeSelectModeDialog.class);
startActivityForResult(intent, 9);
break;
default:
break;
}
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
// TODO Auto-generated method stub
super.onActivityResult(requestCode, resultCode, data);
if(requestCode==9&&resultCode==0){
setPlayMode(v, effect);
}
if(requestCode==8&&resultCode==0){
if(GetPictureDialog.isSelect){
TinyTimePicture picture=new TinyTimePicture();
picture.addPath(MainActivity.currentUser, TimeFragment.PATH);
setPlayMode(v, TransitionEffect.Tablet);
}
}
if(requestCode==10&&resultCode==0){
if(ConfirmDialog.OK){
TinyTimePicture time=new TinyTimePicture();
time.getData(MainActivity.currentUser);
time.deletePath(MainActivity.currentUser, time.getPictures().get(position));
setPlayMode(v, TransitionEffect.Tablet);
}
}
}
}
<file_sep>package com.example.tinylove.Activity;
import com.example.tinylove.R;
import com.example.tinylove.Database.TinyWish;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.CheckBox;
import android.widget.CompoundButton;
import android.widget.CompoundButton.OnCheckedChangeListener;
import android.widget.ImageView;
import android.widget.RadioButton;
import android.widget.TextView;
public class WishDetailDialog extends Activity {
private TextView textWishDetailContent;
private TextView textWishDetailTime;
private CheckBox imageWishIsFinish;
private TextView textWishConfirm;
private TextView textWishDelete;
public static String wishID="";
public static String wishCon="";
public static String wishTime="";
public static int wishState=0;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.dialog_wish_detail);
initView();
setOnListeners();
}
private void initView() {
// TODO Auto-generated method stub
textWishDetailContent=(TextView)findViewById(R.id.wish_detail_content);
textWishDetailTime=(TextView)findViewById(R.id.wish_detail_time);
imageWishIsFinish=(CheckBox)findViewById(R.id.wish_detail_finish);
textWishConfirm=(TextView)findViewById(R.id.wish_ok);
textWishDelete=(TextView)findViewById(R.id.wish_delete);
textWishDetailContent.setText(wishCon);
textWishDetailTime.setText(wishTime);
if(wishState==1){
imageWishIsFinish.setChecked(true);
imageWishIsFinish.setBackgroundResource(R.drawable.wish_finish);
}
else if(wishState==0){
imageWishIsFinish.setChecked(false);
imageWishIsFinish.setBackgroundResource(R.drawable.wish_no_finish);
}
}
private void setOnListeners() {
// TODO Auto-generated method stub
imageWishIsFinish.setOnCheckedChangeListener(new OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
// TODO Auto-generated method stub
if(isChecked){
imageWishIsFinish.setBackgroundResource(R.drawable.wish_finish);
wishState=1;
}
else{
imageWishIsFinish.setBackgroundResource(R.drawable.wish_no_finish);
wishState=0;
}
}
});
textWishConfirm.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
// TODO Auto-generated method stub
TinyWish wish=new TinyWish();
wish.setState(WishDetailDialog.wishID, wishState);
WishActivity.activity.finish();
Intent intent=new Intent(WishDetailDialog.this, WishActivity.class);
startActivity(intent);
finish();
}
});
textWishDelete.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
// TODO Auto-generated method stub
//delete
TinyWish wish=new TinyWish();
wish.delete(WishDetailDialog.wishID);
WishActivity.activity.finish();
Intent intent=new Intent(WishDetailDialog.this, WishActivity.class);
startActivity(intent);
finish();
}
});
}
}
<file_sep>package com.example.tinylove.Adapter;
import java.util.LinkedList;
import com.example.tinylove.R;
import com.example.tinylove.Interface.ItemClickListener;
import com.example.tinylove.View.CustomImageView;
import android.graphics.Color;
import android.graphics.drawable.GradientDrawable;
import android.net.wifi.WifiEnterpriseConfig;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
public class MyRecyclerAdapter extends RecyclerView.Adapter<MyRecyclerAdapter.ViewHolder>{
//label background colors and icons used when adding an anniversary
final int[] color={R.color.eventsColor1,R.color.eventsColor2,R.color.eventsColor3,
R.color.eventsColor4,R.color.eventsColor5,R.color.eventsColor6,
R.color.eventsColor7,R.drawable.anni_together_icon,R.drawable.anni_birthday_icon,
R.drawable.anni_kiss_icon,R.drawable.anni_hug_icon,R.drawable.anni_marry_icon,
R.drawable.anni_travel_icon};
//data backing the anniversary list
LinkedList<String> dataAnniEvent;
LinkedList<String> dataAnniDays;
LinkedList<String> dataAnniYear;
LinkedList<String> dataAnniMonth;
LinkedList<String> dataAnniDay;
LinkedList<Integer> dataAnniColor;
ItemClickListener itemClickListener;
public void setOnClickListener(ItemClickListener itemClickListener){
this.itemClickListener=itemClickListener;
}
//initialize the anniversary list data
public MyRecyclerAdapter(LinkedList<String> events,LinkedList<String> days,
LinkedList<String> Y,LinkedList<String> M,LinkedList<String> D,LinkedList<Integer> back){
this.dataAnniDays=days;
this.dataAnniEvent=events;
this.dataAnniYear=Y;
this.dataAnniMonth=M;
this.dataAnniDay=D;
this.dataAnniColor=back;
}
@Override
public int getItemCount() {
// TODO Auto-generated method stub
return dataAnniEvent.size();
}
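//A color index above 6 points into the icon part of the array: the item is shown as an icon with an "add" hint instead of a dated, colored card.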
@Override
public void onBindViewHolder(MyRecyclerAdapter.ViewHolder viewHolder, int position) {
// TODO Auto-generated method stub
if(dataAnniColor.get(position)>6){
viewHolder.textAnniYandM.setVisibility(View.GONE);
viewHolder.textAnniD.setVisibility(View.GONE);
viewHolder.imageBack.setVisibility(View.VISIBLE);
viewHolder.textAnniDay.setVisibility(View.GONE);
viewHolder.textAnniLong.setVisibility(View.GONE);
viewHolder.imageAddEvent.setVisibility(View.VISIBLE);
viewHolder.llAnniBack.setBackgroundColor(Color.TRANSPARENT);
viewHolder.imageBack.setImageResource(color[dataAnniColor.get(position)]);
viewHolder.textAnniEvent.setText(dataAnniEvent.get(position));
}
else{
viewHolder.textAnniYandM.setVisibility(View.VISIBLE);
viewHolder.textAnniD.setVisibility(View.VISIBLE);
viewHolder.imageBack.setVisibility(View.GONE);
viewHolder.textAnniDay.setVisibility(View.VISIBLE);
viewHolder.textAnniLong.setVisibility(View.VISIBLE);
viewHolder.imageAddEvent.setVisibility(View.GONE);
viewHolder.textAnniYandM.setText(dataAnniYear.get(position)+" "+dataAnniMonth.get(position));
viewHolder.textAnniD.setText(dataAnniDay.get(position));
viewHolder.textAnniEvent.setText(dataAnniEvent.get(position));
viewHolder.textAnniLong.setText(dataAnniDays.get(position));
viewHolder.llAnniBack.setBackgroundResource(color[dataAnniColor.get(position)]);
}
}
@Override
public ViewHolder onCreateViewHolder(ViewGroup viewGroup, int position) {
// TODO Auto-generated method stub
View view=LayoutInflater.from(viewGroup.getContext()).inflate(R.layout.anni_recycler_item, viewGroup,false);
return new ViewHolder(view, itemClickListener);
}
class ViewHolder extends RecyclerView.ViewHolder implements View.OnClickListener{
//views used by an anniversary list item
LinearLayout llAnniBack;
TextView textAnniYandM;
TextView textAnniD;
ImageView imageBack;
TextView textAnniEvent;
TextView textAnniLong;
TextView textAnniDay;
ImageView imageAddEvent;
ItemClickListener listener;
public ViewHolder(View itemView,ItemClickListener itemClickListener) {
super(itemView);
// TODO Auto-generated constructor stub
//views used by an anniversary list item
llAnniBack=(LinearLayout)itemView.findViewById(R.id.anni_recycler_item_picture);
textAnniYandM=(TextView)itemView.findViewById(R.id.anni_recycler_item_text_yearmonth);
textAnniD=(TextView)itemView.findViewById(R.id.anni_recycler_item_text_day);
imageBack=(ImageView)itemView.findViewById(R.id.anni_recycler_item_back_picture);
textAnniEvent=(TextView)itemView.findViewById(R.id.anni_recycler_item_content);
textAnniLong=(TextView)itemView.findViewById(R.id.anni_recycler_item_days);
textAnniDay=(TextView)itemView.findViewById(R.id.anni_recycler_item_day);
imageAddEvent=(ImageView)itemView.findViewById(R.id.anni_recycler_item_add_event);
itemView.setOnClickListener(this);
this.listener=itemClickListener;
}
@Override
public void onClick(View v) {
// TODO Auto-generated method stub
listener.onItemClickListener(getPosition());
}
}
}
<file_sep>package com.example.tinylove.Activity;
import com.example.tinylove.R;
import com.example.tinylove.Adapter.MyBackgroundRecyclerAdapter;
import com.example.tinylove.Adapter.MyRecyclerAdapter;
import com.example.tinylove.Database.TinyAnni;
import com.example.tinylove.Interface.ItemClickListener;
import android.app.Activity;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.os.Bundle;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
import android.widget.TextView;
import android.widget.Toast;
public class SelectBackgroundActivity extends Activity {
final int[] backgroundBigBlur={R.drawable.anni_bg_large_blur_1,R.drawable.anni_bg_large_blur_2,R.drawable.anni_bg_large_blur_3,R.drawable.anni_bg_large_blur_4,
R.drawable.anni_bg_large_blur_5,R.drawable.anni_bg_large_blur_6,R.drawable.anni_bg_large_blur_7,R.drawable.anni_bg_large_blur_8,
R.drawable.anni_bg_large_blur_9,R.drawable.anni_bg_large_blur_10,R.drawable.anni_bg_large_blur_11,R.drawable.anni_bg_large_blur_12,
R.drawable.anni_bg_large_blur_13,R.drawable.anni_bg_large_blur_14,R.drawable.anni_bg_large_blur_15,R.drawable.anni_bg_large_blur_16,
R.drawable.anni_bg_large_blur_17,R.drawable.anni_bg_large_blur_18,R.drawable.anni_bg_large_blur_19,R.drawable.anni_bg_large_blur_20};
final int[] backgroundBig={R.drawable.anni_bg_large_1,R.drawable.anni_bg_large_2,R.drawable.anni_bg_large_3,R.drawable.anni_bg_large_4,R.drawable.anni_bg_large_5,
R.drawable.anni_bg_large_6,R.drawable.anni_bg_large_7,R.drawable.anni_bg_large_8,R.drawable.anni_bg_large_9,R.drawable.anni_bg_large_10,
R.drawable.anni_bg_large_11,R.drawable.anni_bg_large_12,R.drawable.anni_bg_large_13,R.drawable.anni_bg_large_14,R.drawable.anni_bg_large_15,
R.drawable.anni_bg_large_16,R.drawable.anni_bg_large_17,R.drawable.anni_bg_large_18,R.drawable.anni_bg_large_19,R.drawable.anni_bg_large_20};
private ImageView back;
private TextView complete;
private RelativeLayout bigFlurBackGround;
private LinearLayout bigBackground;
private TextView text1;
private TextView text2;
private TextView text3;
private RecyclerView selectBackground;
private RecyclerView.LayoutManager layoutManage;
private MyBackgroundRecyclerAdapter adapter;
public static String anniID="";
public static int IMAGE=0;
private int lastImage=0;
public static boolean isDetail;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_select_background);
initView();
setOnListeners();
}
private void initView() {
// TODO Auto-generated method stub
back=(ImageView)findViewById(R.id.add_anni_background_back);
complete=(TextView)findViewById(R.id.add_anni_background_next);
bigFlurBackGround=(RelativeLayout)findViewById(R.id.add_anni_flur_background);
bigBackground=(LinearLayout)findViewById(R.id.add_anni_background_big_picture);
bigFlurBackGround.setBackgroundResource(backgroundBigBlur[IMAGE]);
bigBackground.setBackgroundResource(backgroundBig[IMAGE]);
text1=(TextView)findViewById(R.id.add_anni_background_big_picture_text1);
text2=(TextView)findViewById(R.id.add_anni_background_big_picture_text2);
text3=(TextView)findViewById(R.id.add_anni_background_big_picture_text3);
selectBackground=(RecyclerView)findViewById(R.id.add_anni_recycler);
selectBackground.setHasFixedSize(true);
layoutManage=new LinearLayoutManager(SelectBackgroundActivity.this);
((LinearLayoutManager) layoutManage).setOrientation(LinearLayoutManager.HORIZONTAL);
selectBackground.setLayoutManager(layoutManage);
adapter=new MyBackgroundRecyclerAdapter(IMAGE);
selectBackground.setAdapter(adapter);
}
private void setOnListeners() {
// TODO Auto-generated method stub
back.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
// TODO Auto-generated method stub
if(isDetail){
Intent ii=new Intent(SelectBackgroundActivity.this, AnniDetailActivity.class);
startActivity(ii);
SelectBackgroundActivity.isDetail=false;
}
SelectBackgroundActivity.this.finish();
}
});
complete.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
// TODO Auto-generated method stub
AnniDetailActivity.IMAGE=SelectBackgroundActivity.IMAGE;
AddAnniActivity.IMAGE=SelectBackgroundActivity.IMAGE;
TinyAnni anni=new TinyAnni();
//anni.display();
anni.setBackground(SelectBackgroundActivity.IMAGE, SelectBackgroundActivity.anniID);
//anni.display();
if(isDetail){
Intent ii=new Intent(SelectBackgroundActivity.this, AnniDetailActivity.class);
startActivity(ii);
SelectBackgroundActivity.isDetail=false;
SelectBackgroundActivity.this.finish();
}
finish();
// Intent i=new Intent(SelectBackgroundActivity.this, SelectBackgroundActivity.class);
// startActivity(i);
// finish();
//
}
});
adapter.setOnClickListener(new ItemClickListener() {
@Override
public void onItemClickListener(int position) {
// TODO Auto-generated method stub
SelectBackgroundActivity.IMAGE=position;
if(lastImage!=position){
bigBackground.setBackgroundResource(backgroundBig[position]);
bigFlurBackGround.setBackgroundResource(backgroundBigBlur[position]);
lastImage=position;
}
// View v1=layoutManage.findViewByPosition(position);
// View v2=layoutManage.findViewByPosition(adapter.getSelectID());
//
// ImageView i1=(ImageView)v1.findViewById(R.id.add_anni_recycler_item_select);
// i1.setVisibility(View.VISIBLE);
//
// ImageView i2=(ImageView)v2.findViewById(R.id.add_anni_recycler_item_select);
// i2.setVisibility(View.GONE);
//
// adapter.setSelectID(position);
}
});
}
}
<file_sep>package com.example.tinylove.Adapter;
import com.example.tinylove.R;
import com.example.tinylove.Adapter.MyRecyclerAdapter.ViewHolder;
import com.example.tinylove.Interface.ItemClickListener;
import com.example.tinylove.View.CustomImageView;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.RadioButton;
public class MyBackgroundRecyclerAdapter extends RecyclerView.Adapter<MyBackgroundRecyclerAdapter.ViewHolder>{
//preview images for the anniversary background choices
final int[] backgroundSmall={R.drawable.anni_bg_preview_1,R.drawable.anni_bg_preview_2,R.drawable.anni_bg_preview_3,R.drawable.anni_bg_preview_4,R.drawable.anni_bg_preview_5,
R.drawable.anni_bg_preview_6,R.drawable.anni_bg_preview_7,R.drawable.anni_bg_preview_8,R.drawable.anni_bg_preview_9,R.drawable.anni_bg_preview_10,
R.drawable.anni_bg_preview_11,R.drawable.anni_bg_preview_12,R.drawable.anni_bg_preview_13,R.drawable.anni_bg_preview_14,R.drawable.anni_bg_preview_15,
R.drawable.anni_bg_preview_16,R.drawable.anni_bg_preview_17,R.drawable.anni_bg_preview_18,R.drawable.anni_bg_preview_19,R.drawable.anni_bg_preview_20};
//index of the selected background image
private int selectID=-1;
ItemClickListener itemClickListener;
public int getSelectID() {
return selectID;
}
public void setSelectID(int selectID) {
this.selectID = selectID;
}
//initialize the background selection list data
public MyBackgroundRecyclerAdapter(int id){
this.selectID=id;
}
public void setOnClickListener(ItemClickListener itemClickListener){
this.itemClickListener=itemClickListener;
}
@Override
public int getItemCount() {
// TODO Auto-generated method stub
return backgroundSmall.length;
}
@Override
public void onBindViewHolder(MyBackgroundRecyclerAdapter.ViewHolder viewHolder, int position) {
// TODO Auto-generated method stub
viewHolder.llBackPicture.setImageResource(backgroundSmall[position]);
// viewHolder.imageSelect.setVisibility(View.GONE);
// viewHolder.imageSelect.setChecked(false);
// if(position==selectID){
// viewHolder.imageSelect.setVisibility(View.VISIBLE);
// viewHolder.imageSelect.setChecked(false);
// }
}
@Override
public ViewHolder onCreateViewHolder(ViewGroup viewGroup, int position) {
// TODO Auto-generated method stub
View view=LayoutInflater.from(viewGroup.getContext()).inflate(R.layout.add_anni_recycler_item, viewGroup,false);
return new ViewHolder(view, itemClickListener);
}
class ViewHolder extends RecyclerView.ViewHolder implements View.OnClickListener{
//views used by a background list item
CustomImageView llBackPicture;
//RadioButton imageSelect;
ItemClickListener listener;
public ViewHolder(View itemView,ItemClickListener itemClickListener) {
super(itemView);
// TODO Auto-generated constructor stub
//views used by a background list item
llBackPicture=(CustomImageView)itemView.findViewById(R.id.add_anni_recycler_item_picture);
llBackPicture.setType(CustomImageView.TYPE_ROUND);
llBackPicture.setRoundRadius(10);
//imageSelect=(RadioButton)itemView.findViewById(R.id.add_anni_recycler_item_select);
itemView.setOnClickListener(this);
this.listener=itemClickListener;
}
@Override
public void onClick(View v) {
// TODO Auto-generated method stub
listener.onItemClickListener(getPosition());
}
}
}
<file_sep>package com.example.tinylove.Activity;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import com.example.tinylove.R;
import com.example.tinylove.Database.TinyAnni;
import com.example.tinylove.View.SwitchView;
import android.app.Activity;
import android.content.Intent;
import android.graphics.Color;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.RelativeLayout;
import android.widget.TextView;
public class AnniEditActivity extends Activity implements OnClickListener {
private ImageView back;
private TextView complete;
private ImageView imageSelectColor;
private EditText anniContent;
private ImageView imageSelectEventColor1;
private ImageView imageSelectEventColor2;
private ImageView imageSelectEventColor3;
private ImageView imageSelectEventColor4;
private ImageView imageSelectEventColor5;
private ImageView imageSelectEventColor6;
private ImageView imageSelectEventColor7;
private RelativeLayout rlTime;
private TextView textEventTime;
private SwitchView switchView;
private RelativeLayout rlRemind;
private TextView textRemind;
private RelativeLayout rlDelete;
private boolean isSystemAnni;
private TinyAnni anni;
public static String anniID="";
public static String DATE_YEAR="";
public static String DATE_MONTH="";
public static String DATE_DAY="";
public static String FREQUENT="";
public static int BACKCOLOR=1;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_anni_edit);
anni=new TinyAnni();
anni.getData(MainActivity.currentUser);
int i=anni.getIds().indexOf(anniID);
DATE_YEAR=anni.getY().get(i);
DATE_MONTH=anni.getM().get(i);
DATE_DAY=anni.getD().get(i);
FREQUENT=anni.getFrequent(MainActivity.currentUser, anniID);
BACKCOLOR=anni.getBack().get(i);
initView();
setOnListeners();
}
private void initView() {
// TODO Auto-generated method stub
back=(ImageView)findViewById(R.id.anni_edit_back);
complete=(TextView)findViewById(R.id.anni_edit_next);
imageSelectColor=(ImageView)findViewById(R.id.anni_edit_select_color);
anniContent=(EditText)findViewById(R.id.anni_edit_event_name);
anniContent.setEnabled(true);
imageSelectEventColor1=(ImageView)findViewById(R.id.anni_edit_select_1);
imageSelectEventColor2=(ImageView)findViewById(R.id.anni_edit_select_2);
imageSelectEventColor3=(ImageView)findViewById(R.id.anni_edit_select_3);
imageSelectEventColor4=(ImageView)findViewById(R.id.anni_edit_select_4);
imageSelectEventColor5=(ImageView)findViewById(R.id.anni_edit_select_5);
imageSelectEventColor6=(ImageView)findViewById(R.id.anni_edit_select_6);
imageSelectEventColor7=(ImageView)findViewById(R.id.anni_edit_select_7);
rlTime=(RelativeLayout)findViewById(R.id.anni_edit_rl_time);
textEventTime=(TextView)findViewById(R.id.anni_edit_text_time);
switchView=(SwitchView)findViewById(R.id.anni_edit_time_switch);
switchView.setState(true);
switchView.setVisibility(View.GONE);
rlRemind=(RelativeLayout)findViewById(R.id.anni_edit_rl_remind);
rlRemind.setEnabled(true);
textRemind=(TextView)findViewById(R.id.anni_edit_remind_frequent);
rlDelete=(RelativeLayout)findViewById(R.id.anni_edit_rl_delete);
anni=new TinyAnni();
String s=anni.getAnniDate(anniID);
try {
SimpleDateFormat sdf=new SimpleDateFormat("yyyy-MM-dd");
Date d=sdf.parse(s);
s=(d.getYear()+1900)+"年"+(d.getMonth()+1)+"月"+d.getDate()+"日";
} catch (ParseException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
textEventTime.setText(s);
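//Ids 0001-0007 are the built-in anniversaries: their titles are fixed and the reminder row is read-only.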
if(AnniEditActivity.anniID.equals("0001")||AnniEditActivity.anniID.equals("0002")||AnniEditActivity.anniID.equals("0003")||
AnniEditActivity.anniID.equals("0004")||AnniEditActivity.anniID.equals("0005")||AnniEditActivity.anniID.equals("0006")||
AnniEditActivity.anniID.equals("0007")){
anni=new TinyAnni();
isSystemAnni=true;
switch (Integer.valueOf(AnniEditActivity.anniID)) {
case 1:
anniContent.setText("我们在一起啦");
anniContent.setEnabled(false);
rlRemind.setEnabled(false);
rlRemind.setBackgroundColor(Color.WHITE);
textRemind.setText(anni.getFrequent(MainActivity.currentUser, "0001"));
break;
case 2:
anniContent.setText("TA的生日");
anniContent.setEnabled(false);
rlRemind.setEnabled(false);
rlRemind.setBackgroundColor(Color.WHITE);
textRemind.setText(anni.getFrequent(MainActivity.currentUser, "0002"));
break;
case 3:
anniContent.setText("我的生日");
anniContent.setEnabled(false);
rlRemind.setEnabled(false);
rlRemind.setBackgroundColor(Color.WHITE);
textRemind.setText(anni.getFrequent(MainActivity.currentUser, "0003"));
break;
case 4:
anniContent.setText("第一次拥抱的日子");
anniContent.setEnabled(false);
rlRemind.setEnabled(false);
rlRemind.setBackgroundColor(Color.WHITE);
textRemind.setText(anni.getFrequent(MainActivity.currentUser, "0004"));
break;
case 5:
anniContent.setText("第一次接吻的日子");
anniContent.setEnabled(false);
rlRemind.setEnabled(false);
rlRemind.setBackgroundColor(Color.WHITE);
textRemind.setText(anni.getFrequent(MainActivity.currentUser, "0005"));
break;
case 6:
anniContent.setText("第一次一起去旅行的日子");
anniContent.setEnabled(false);
rlRemind.setEnabled(false);
rlRemind.setBackgroundColor(Color.WHITE);
textRemind.setText(anni.getFrequent(MainActivity.currentUser, "0006"));
break;
case 7:
anniContent.setText("我们结婚啦");
anniContent.setEnabled(false);
rlRemind.setEnabled(false);
rlRemind.setBackgroundColor(Color.WHITE);
textRemind.setText(anni.getFrequent(MainActivity.currentUser, "0007"));
break;
default:
break;
}
}
else{
isSystemAnni=false;
anniContent.setEnabled(true);
rlRemind.setEnabled(true);
anniContent.setText(anni.getAnniContent(anniID));
textRemind.setText(anni.getFrequent(MainActivity.currentUser, anniID));
}
}
private void setOnListeners() {
// TODO Auto-generated method stub
back.setOnClickListener(this);
complete.setOnClickListener(this);
imageSelectEventColor1.setOnClickListener(this);
imageSelectEventColor2.setOnClickListener(this);
imageSelectEventColor3.setOnClickListener(this);
imageSelectEventColor4.setOnClickListener(this);
imageSelectEventColor5.setOnClickListener(this);
imageSelectEventColor6.setOnClickListener(this);
imageSelectEventColor7.setOnClickListener(this);
rlTime.setOnClickListener(this);
//switchView.setOnStateChangedListener(this);
rlRemind.setOnClickListener(this);
rlDelete.setOnClickListener(this);
}
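//Resets all seven color swatches, then highlights the chosen one and stores its index in BACKCOLOR.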
private void setSelectColor(int id){
imageSelectEventColor1.setImageResource(R.drawable.event_color_1);
imageSelectEventColor2.setImageResource(R.drawable.event_color_2);
imageSelectEventColor3.setImageResource(R.drawable.event_color_3);
imageSelectEventColor4.setImageResource(R.drawable.event_color_4);
imageSelectEventColor5.setImageResource(R.drawable.event_color_5);
imageSelectEventColor6.setImageResource(R.drawable.event_color_6);
imageSelectEventColor7.setImageResource(R.drawable.event_color_7);
switch (id) {
case 1:
AnniEditActivity.BACKCOLOR=0;
imageSelectEventColor1.setImageResource(R.drawable.add_event_color_1);
imageSelectColor.setImageResource(R.drawable.event_color_1);
break;
case 2:
AnniEditActivity.BACKCOLOR=1;
imageSelectEventColor2.setImageResource(R.drawable.add_event_color_2);
imageSelectColor.setImageResource(R.drawable.event_color_2);
break;
case 3:
AnniEditActivity.BACKCOLOR=2;
imageSelectEventColor3.setImageResource(R.drawable.add_event_color_3);
imageSelectColor.setImageResource(R.drawable.event_color_3);
break;
case 4:
AnniEditActivity.BACKCOLOR=3;
imageSelectEventColor4.setImageResource(R.drawable.add_event_color_4);
imageSelectColor.setImageResource(R.drawable.event_color_4);
break;
case 5:
AnniEditActivity.BACKCOLOR=4;
imageSelectEventColor5.setImageResource(R.drawable.add_event_color_5);
imageSelectColor.setImageResource(R.drawable.event_color_5);
break;
case 6:
AnniEditActivity.BACKCOLOR=5;
imageSelectEventColor6.setImageResource(R.drawable.add_event_color_6);
imageSelectColor.setImageResource(R.drawable.event_color_6);
break;
case 7:
AnniEditActivity.BACKCOLOR=6;
imageSelectEventColor7.setImageResource(R.drawable.add_event_color_7);
imageSelectColor.setImageResource(R.drawable.event_color_7);
break;
default:
break;
}
}
@Override
public void onClick(View v) {
// TODO Auto-generated method stub
switch (v.getId()) {
case R.id.anni_edit_back:
Intent i=new Intent(AnniEditActivity.this, AnniDetailActivity.class);
startActivity(i);
finish();
break;
case R.id.anni_edit_next:
String year=AnniEditActivity.DATE_YEAR;
String month=AnniEditActivity.DATE_MONTH;
String day=AnniEditActivity.DATE_DAY;
String color=AnniEditActivity.BACKCOLOR+"";
if(isSystemAnni){
if(year.equals("")||month.equals("")||day.equals("")){
PromptDialog.title="Error";
PromptDialog.content="还有信息未填写!";
Intent di=new Intent(AnniEditActivity.this, PromptDialog.class);
startActivity(di);
}
else{
anni=new TinyAnni();
anni.userName=MainActivity.currentUser;
anni.anniID=AnniEditActivity.anniID;
anni.anniYear=year;
anni.anniMonth=month;
anni.anniDay=day;
anni.anniColor=color;
anni.updateSystemAnni();
AnniEditActivity.DATE_YEAR="";
AnniEditActivity.DATE_MONTH="";
AnniEditActivity.DATE_DAY="";
AnniEditActivity.BACKCOLOR=1;
Intent i0=new Intent(AnniEditActivity.this, AnniDetailActivity.class);
startActivity(i0);
finish();
}
}
else{
String event=anniContent.getText().toString();
String frequent=textRemind.getText().toString();
if(year.equals("")||month.equals("")||day.equals("")||event==null||event.equals("")){
PromptDialog.title="Error";
PromptDialog.content="还有信息未填写!";
Intent di=new Intent(AnniEditActivity.this, PromptDialog.class);
startActivity(di);
}
else{
anni=new TinyAnni();
anni.userName=MainActivity.currentUser;
anni.anniID=AnniEditActivity.anniID;
anni.anniYear=year;
anni.anniMonth=month;
anni.anniDay=day;
anni.anniColor=color;
anni.anniFrequent=frequent;
anni.anniContent=event;
anni.updateAnni();
AnniEditActivity.DATE_YEAR="";
AnniEditActivity.DATE_MONTH="";
AnniEditActivity.DATE_DAY="";
AnniEditActivity.BACKCOLOR=1;
Intent i0=new Intent(AnniEditActivity.this, AnniDetailActivity.class);
startActivity(i0);
finish();
}
}
break;
case R.id.anni_edit_select_1:
setSelectColor(1);
break;
case R.id.anni_edit_select_2:
setSelectColor(2);
break;
case R.id.anni_edit_select_3:
setSelectColor(3);
break;
case R.id.anni_edit_select_4:
setSelectColor(4);
break;
case R.id.anni_edit_select_5:
setSelectColor(5);
break;
case R.id.anni_edit_select_6:
setSelectColor(6);
break;
case R.id.anni_edit_select_7:
setSelectColor(7);
break;
case R.id.anni_edit_rl_time:
Intent intentSelectDate=new Intent(AnniEditActivity.this, SelectDateDialog.class);
startActivityForResult(intentSelectDate,2);
break;
case R.id.anni_edit_rl_remind:
Intent intentSelectFrequent=new Intent(AnniEditActivity.this, SelectFrequentDialog.class);
startActivityForResult(intentSelectFrequent,3);
break;
case R.id.anni_edit_rl_delete:
if(isSystemAnni){
anni.deleteSystemAnni(anniID);
}
else{
anni.deleteAnni(anniID);
}
Intent intentList=new Intent(AnniEditActivity.this, AnniActivity.class);
startActivity(intentList);
finish();
break;
default:
break;
}
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
// TODO Auto-generated method stub
super.onActivityResult(requestCode, resultCode, data);
if(requestCode==2&&resultCode==0){
if(AddAnniActivity.DATE_YEAR.equals("")||AddAnniActivity.DATE_MONTH.equals("")||AddAnniActivity.DATE_DAY.equals("")){
textEventTime.setText("点击这里选择日期");
}
else{
textEventTime.setText(AnniEditActivity.DATE_YEAR+"年"+AnniEditActivity.DATE_MONTH+"月"+AnniEditActivity.DATE_DAY+"日");
}
}
if(requestCode==3&&resultCode==0){
textRemind.setText(AddAnniActivity.FREQUENT);
}
}
}
<file_sep>package com.example.tinylove.Activity;
import com.example.tinylove.R;
import com.example.tinylove.View.PictureView.TransitionEffect;
import android.app.Activity;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.TextView;
public class TimeSelectModeDialog extends Activity implements OnClickListener {
private TextView[] textMode=new TextView[12];
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.dialog_time_select_mode);
textMode[0]=(TextView)findViewById(R.id.mode_0);
textMode[1]=(TextView)findViewById(R.id.mode_1);
textMode[2]=(TextView)findViewById(R.id.mode_2);
textMode[3]=(TextView)findViewById(R.id.mode_3);
textMode[4]=(TextView)findViewById(R.id.mode_4);
textMode[5]=(TextView)findViewById(R.id.mode_5);
textMode[6]=(TextView)findViewById(R.id.mode_6);
textMode[7]=(TextView)findViewById(R.id.mode_7);
textMode[8]=(TextView)findViewById(R.id.mode_8);
textMode[9]=(TextView)findViewById(R.id.mode_9);
textMode[10]=(TextView)findViewById(R.id.mode_10);
textMode[11]=(TextView)findViewById(R.id.mode_11);
textMode[0].setOnClickListener(this);
textMode[1].setOnClickListener(this);
textMode[2].setOnClickListener(this);
textMode[3].setOnClickListener(this);
textMode[4].setOnClickListener(this);
textMode[5].setOnClickListener(this);
textMode[6].setOnClickListener(this);
textMode[7].setOnClickListener(this);
textMode[8].setOnClickListener(this);
textMode[9].setOnClickListener(this);
textMode[10].setOnClickListener(this);
textMode[11].setOnClickListener(this);
}
@Override
public void onClick(View v) {
// TODO Auto-generated method stub
switch(v.getId()){
case R.id.mode_0:
TimeFragment.effect=TransitionEffect.Standard;
finish();
break;
case R.id.mode_1:
TimeFragment.effect=TransitionEffect.Tablet;
finish();
break;
case R.id.mode_2:
TimeFragment.effect=TransitionEffect.CubeIn;
finish();
break;
case R.id.mode_3:
TimeFragment.effect=TransitionEffect.CubeOut;
finish();
break;
case R.id.mode_4:
TimeFragment.effect=TransitionEffect.FlipVertical;
finish();
break;
case R.id.mode_5:
TimeFragment.effect=TransitionEffect.FlipHorizontal;
finish();
break;
case R.id.mode_6:
TimeFragment.effect=TransitionEffect.Stack;
finish();
break;
case R.id.mode_7:
TimeFragment.effect=TransitionEffect.ZoomIn;
finish();
break;
case R.id.mode_8:
TimeFragment.effect=TransitionEffect.ZoomOut;
finish();
break;
case R.id.mode_9:
TimeFragment.effect=TransitionEffect.RotateUp;
finish();
break;
case R.id.mode_10:
TimeFragment.effect=TransitionEffect.RotateDown;
finish();
break;
case R.id.mode_11:
TimeFragment.effect=TransitionEffect.Accordion;
finish();
break;
}
}
}
<file_sep>package com.example.tinylove.Activity;
import java.util.LinkedList;
import com.example.tinylove.R;
import com.example.tinylove.Database.TinyCheck;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.webkit.WebView.FindListener;
import android.widget.ImageView;
import android.widget.RelativeLayout;
import android.widget.TextView;
import android.widget.Toast;
public class CheckActivity extends Activity implements OnClickListener {
private ImageView back;
private CheckItem[] items=new CheckItem[8];
public LinkedList<String> checkIDs;
public LinkedList<Integer> checkStates;
public LinkedList<Integer> checkTimess;
public LinkedList<String> checkLastDays;
private TinyCheck check;
public static Activity activity;
public static boolean detailIsChangeCheck;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_check);
activity=this;
get();
initView();
setOnListeners();
}
private void get(){
check=new TinyCheck();
check.getData(MainActivity.currentUser);
checkIDs=check.getCids();
checkStates=check.getCss();
checkTimess=check.getCtss();
checkLastDays=check.getClds();
check.display();
}
private void initView() {
// TODO Auto-generated method stub
back=(ImageView)findViewById(R.id.check_back);
for(int i=0;i<8;i++){
int id=CheckItem.dataItemId[i];
int id1=CheckItem.dataCheckOutId[i];
int id2=CheckItem.dataCheckInImageId[i];
int id3=CheckItem.dataCheckInSpace[i];
int id4=CheckItem.dataCheckInNum[i];
int id5=CheckItem.dataCheckInDay[i];
items[i]=new CheckItem(id, id1, id2, id3, id4, id5);
items[i].item=(RelativeLayout)findViewById(id);
items[i].textCheckOut=(TextView)findViewById(id1);
items[i].imageCheckIn=(ImageView)findViewById(id2);
items[i].viewCheckInSpace=(View)findViewById(id3);
items[i].textCheckInNum=(TextView)findViewById(id4);
items[i].textCheckInDay=(TextView)findViewById(id5);
if(check.isChecked(checkIDs.get(i))){
items[i].textCheckOut.setVisibility(View.GONE);
items[i].imageCheckIn.setVisibility(View.VISIBLE);
items[i].viewCheckInSpace.setVisibility(View.VISIBLE);
items[i].textCheckInNum.setVisibility(View.VISIBLE);
items[i].textCheckInDay.setVisibility(View.VISIBLE);
items[i].textCheckInNum.setText(checkTimess.get(i)+"");
}
else{
items[i].textCheckOut.setVisibility(View.VISIBLE);
items[i].imageCheckIn.setVisibility(View.GONE);
items[i].viewCheckInSpace.setVisibility(View.GONE);
items[i].textCheckInNum.setVisibility(View.GONE);
items[i].textCheckInDay.setVisibility(View.GONE);
}
}
}
private void setOnListeners() {
// TODO Auto-generated method stub
back.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
// TODO Auto-generated method stub
finish();
}
});
for(int i=0;i<8;i++){
items[i].item.setOnClickListener(this);
items[i].textCheckOut.setOnClickListener(this);
}
}
@Override
public void onClick(View v) {
// TODO Auto-generated method stub
for(int i=0;i<8;i++){
if(v.getId()==items[i].id){
CheckDetailDialog.checkID=checkIDs.get(i);
CheckDetailDialog.checkId=i;
if(check.isChecked(checkIDs.get(i))){
CheckDetailDialog.isChecked=true;
}
else{
CheckDetailDialog.isChecked=false;
}
CheckDetailDialog.checkDays=checkTimess.get(i);
Intent intent=new Intent(CheckActivity.this, CheckDetailDialog.class);
startActivityForResult(intent,5);
}
if(v.getId()==items[i].checkOutId){
if(check.check(checkIDs.get(i))){
items[i].textCheckOut.setVisibility(View.GONE);
items[i].imageCheckIn.setVisibility(View.VISIBLE);
items[i].viewCheckInSpace.setVisibility(View.VISIBLE);
items[i].textCheckInNum.setVisibility(View.VISIBLE);
items[i].textCheckInDay.setVisibility(View.VISIBLE);
get();
items[i].textCheckInNum.setText(checkTimess.get(i)+"");
}
else{
PromptDialog.title="Error";
PromptDialog.content="今日已打卡";
Intent ip=new Intent(CheckActivity.this, PromptDialog.class);
startActivity(ip);
}
}
}
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
// TODO Auto-generated method stub
super.onActivityResult(requestCode, resultCode, data);
if(requestCode==5&&resultCode==0){
if(detailIsChangeCheck){
Intent i=new Intent(CheckActivity.this, CheckActivity.class);
startActivity(i);
finish();
}
}
}
}
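//Bundles the view ids and bound views for one check-in row; array indices follow the MORNING..SHOP constants below.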
class CheckItem{
final static int[] dataItemId={R.id.check_morning,R.id.check_evening,R.id.check_phone,R.id.check_travel,
R.id.check_food,R.id.check_movie,R.id.check_sport,R.id.check_shop};
final static int[] dataCheckOutId={R.id.check_morning_out,R.id.check_evening_out,R.id.check_phone_out,
R.id.check_travel_out,R.id.check_food_out,R.id.check_movie_out,R.id.check_sport_out,R.id.check_shop_out};
final static int[] dataCheckInImageId={R.id.check_morning_in_image,R.id.check_evening_in_image,R.id.check_phone_in_image,
R.id.check_travel_in_image,R.id.check_food_in_image,R.id.check_movie_in_image,R.id.check_sport_in_image,R.id.check_shop_in_image};
final static int[] dataCheckInSpace={R.id.check_morning_in_space,R.id.check_evening_in_space,R.id.check_phone_in_space,
R.id.check_travel_in_space,R.id.check_food_in_space,R.id.check_movie_in_space,R.id.check_sport_in_space,R.id.check_shop_in_space};
final static int[] dataCheckInNum={R.id.check_morning_in_num,R.id.check_evening_in_num,R.id.check_phone_in_num,
R.id.check_travel_in_num,R.id.check_food_in_num,R.id.check_movie_in_num,R.id.check_sport_in_num,R.id.check_shop_in_num};
final static int[] dataCheckInDay={R.id.check_morning_in_day,R.id.check_evening_in_day,R.id.check_phone_in_day,
R.id.check_travel_in_day,R.id.check_food_in_day,R.id.check_movie_in_day,R.id.check_sport_in_day,R.id.check_shop_in_day};
final static int MORNING=0;
final static int EVENING=1;
final static int PHONE=2;
final static int TRAVEL=3;
final static int FOOD=4;
final static int MOVIE=5;
final static int SPORT=6;
final static int SHOP=7;
RelativeLayout item;
int id;
TextView textCheckOut;
int checkOutId;
ImageView imageCheckIn;
int checkInImageId;
View viewCheckInSpace;
int checkInViewId;
TextView textCheckInNum;
int checkInTextNumId;
TextView textCheckInDay;
int checkInTextDayId;
public CheckItem(int id,int id1,int id2,int id3,int id4,int id5){
this.id=id;
this.checkOutId=id1;
this.checkInImageId=id2;
this.checkInViewId=id3;
this.checkInTextNumId=id4;
this.checkInTextDayId=id5;
}
}
<file_sep>package com.example.tinylove.Activity;
import com.example.tinylove.R;
import com.example.tinylove.Database.TinyCheck;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
public class CheckDetailDialog extends Activity {
final static int[] icon={R.drawable.checkin_morning,R.drawable.checkin_night,R.drawable.checkin_phone,R.drawable.checkin_travel,
R.drawable.checkin_eat,R.drawable.checkin_movie,R.drawable.checkin_sport,R.drawable.checkin_shop};
final static String[] content={"每天说早安","每天说晚安","煲一次电话粥","一起去旅行","一起去吃好吃的","一起去看电影","一起去健身","一起去购物"};
private ImageView imageHead;
private TextView textHead;
private ImageView imageCheckState;
private TextView textCheckState;
private TextView textDays;
public static String checkID="";
public static int checkId=0;
public static boolean isChecked=false;
public static int checkDays=0;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.dialog_check_detail);
CheckActivity.detailIsChangeCheck=false;
initView();
setOnListeners();
}
private void initView() {
// TODO Auto-generated method stub
imageHead=(ImageView)findViewById(R.id.check_detail_image);
textHead=(TextView)findViewById(R.id.check_detail_text);
imageHead.setImageResource(icon[checkId]);
textHead.setText(content[checkId]);
imageCheckState=(ImageView)findViewById(R.id.check_detail_image_check);
textCheckState=(TextView)findViewById(R.id.check_detail_text_state);
textDays=(TextView)findViewById(R.id.checkin_detail_text_days);
if(isChecked){
imageCheckState.setImageResource(R.drawable.checkin_detail_state_done_icon);
textCheckState.setText("今日已打卡");
}
else{
imageCheckState.setImageResource(R.drawable.checkin_detail_state_not_icon);
textCheckState.setText("点击打卡");
}
textDays.setText(checkDays+"");
}
private void setOnListeners() {
// TODO Auto-generated method stub
imageCheckState.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
// TODO Auto-generated method stub
if(!isChecked){
TinyCheck check=new TinyCheck();
if(check.check(checkID)){
imageCheckState.setImageResource(R.drawable.checkin_detail_state_done_icon);
textCheckState.setText("今日已打卡");
textDays.setText((checkDays+1)+"");
CheckActivity.detailIsChangeCheck=true;
}
else{
Toast.makeText(CheckDetailDialog.this, "有问题!", Toast.LENGTH_SHORT).show();
}
}
}
});
}
}
<file_sep>package com.example.tinylove.Activity;
import com.example.tinylove.R;
import android.app.Activity;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.TextView;
public class AboutUsDialog extends Activity {
public static String content="";
private TextView textAboutUs;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.dialog_about_us);
textAboutUs=(TextView)findViewById(R.id.about_us_content);
textAboutUs.setText(content);
}
}
<file_sep>package com.example.tinylove.Activity;
import java.util.LinkedList;
import com.example.tinylove.R;
import com.example.tinylove.Adapter.MyWishRecyclerAdapter;
import com.example.tinylove.Database.TinyWish;
import com.example.tinylove.Interface.ItemClickListener;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.provider.ContactsContract.CommonDataKinds.Im;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.ImageView;
public class WishActivity extends Activity {
private ImageView back;
private ImageView more;
private RecyclerView wishList;
private RecyclerView.LayoutManager layoutManage;
private MyWishRecyclerAdapter adapter;
public static Activity activity;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_wish);
activity=this;
initView();
onSetListeners();
}
private LinkedList<String> wishIDs;
private LinkedList<String> wishTimes;
private LinkedList<String> wishContents;
private LinkedList<Integer> wishStates;
private void initView() {
// TODO Auto-generated method stub
back=(ImageView)findViewById(R.id.wish_back);
more=(ImageView)findViewById(R.id.wish_more);
wishList=(RecyclerView)findViewById(R.id.wish_recycler);
wishList.setHasFixedSize(true);
layoutManage=new LinearLayoutManager(WishActivity.this);
wishList.setLayoutManager(layoutManage);
TinyWish wish=new TinyWish();
wish.getData(MainActivity.currentUser);
wishIDs=wish.getWids();
wishTimes=wish.getWts();
wishContents=wish.getWcs();
wishStates=wish.getWss();
adapter=new MyWishRecyclerAdapter(wishContents, wishStates);
wishList.setAdapter(adapter);
}
private void onSetListeners() {
// TODO Auto-generated method stub
back.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
// TODO Auto-generated method stub
WishActivity.this.finish();
}
});
more.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
// TODO Auto-generated method stub
Intent intentMore=new Intent(WishActivity.this, WishAddDialog.class);
startActivity(intentMore);
}
});
adapter.setOnClickListener(new ItemClickListener() {
@Override
public void onItemClickListener(int position) {
// TODO Auto-generated method stub
WishDetailDialog.wishID=wishIDs.get(position);
WishDetailDialog.wishCon=wishContents.get(position);
WishDetailDialog.wishTime=wishTimes.get(position);
WishDetailDialog.wishState=wishStates.get(position);
Intent intent=new Intent(WishActivity.this, WishDetailDialog.class);
startActivity(intent);
}
});
}
}
<file_sep>package com.example.tinylove.Activity;
import com.example.tinylove.R;
import com.example.tinylove.Database.TinyAnni;
import com.example.tinylove.Database.TinyUser;
import android.app.Activity;
import android.content.Intent;
import android.graphics.Color;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.EditText;
public class LoginActivity extends Activity {
private EditText editName;
private EditText editPsw;
private Button btnLogin;
public static String name="";
private String psw="";
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_login);
editName=(EditText)findViewById(R.id.login_name);
editPsw=(EditText)findViewById(R.id.login_psw);
btnLogin=(Button)findViewById(R.id.login_btn);
btnLogin.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
// TODO Auto-generated method stub
name=editName.getText().toString();
psw=editPsw.getText().toString();
TinyUser user=new TinyUser();
if(user.login(name, psw)){
MainActivity.currentUser=user.userName;
TinyAnni anni=new TinyAnni();
HomepageFragment.DAY=anni.getDays(MainActivity.currentUser);
GuideActivity.activity.finish();
Intent intent=new Intent(LoginActivity.this, MainActivity.class);
startActivity(intent);
finish();
}
else{
PromptDialog.title="Error";
PromptDialog.content="用户名或者密码不正确!";
Intent intentError=new Intent(LoginActivity.this,PromptDialog.class);
startActivity(intentError);
}
}
});
}
}
<file_sep>package com.example.tinylove.Adapter;
import java.util.List;
import android.app.Activity;
import android.os.Parcelable;
import android.support.v4.view.PagerAdapter;
import android.support.v4.view.ViewPager;
import android.view.View;
import android.widget.ImageView;
public class MyViewPagerAdapter extends PagerAdapter{
private List<View> views;
private Activity activity;
private static final String SHAREDPREFERENCES_NAME = "first_pref";
public MyViewPagerAdapter(List<View> views,Activity activity) {
// TODO Auto-generated constructor stub
this.views=views;
this.activity=activity;
}
    // Instantiate the page at position arg1
@Override
public Object instantiateItem(View arg0, int arg1) {
// TODO Auto-generated method stub
((ViewPager)arg0).addView(views.get(arg1), 0);
return views.get(arg1);
}
    // Destroy the page at position arg1
@Override
public void destroyItem(View arg0, int arg1, Object arg2) {
// TODO Auto-generated method stub
((ViewPager)arg0).removeView(views.get(arg1));
}
    // Check whether the view was generated from the given object
@Override
public boolean isViewFromObject(View arg0,Object arg1){
return (arg0==arg1);
}
    // Get the number of pages
@Override
public int getCount() {
// TODO Auto-generated method stub
if(views!=null){
return views.size();
}
return 0;
}
@Override
public void finishUpdate(View container) {
// TODO Auto-generated method stub
}
@Override
public void restoreState(Parcelable state, ClassLoader loader) {
// TODO Auto-generated method stub
}
@Override
public Parcelable saveState() {
// TODO Auto-generated method stub
return null;
}
@Override
public void startUpdate(View container) {
// TODO Auto-generated method stub
}
}
| ac00bc7516be04773ea01f0ce29863abec3e7f89 | [
"Java"
] | 18 | Java | jid1311644/TinyLove | 6905c4dcb339d82a9902196fc3fa08edb15d6309 | 76e3631a52f843136f59d7268c9af2185ee81a4b | |
refs/heads/master | <file_sep>import React from 'react';
import { Platform, StyleSheet, Text, View } from 'react-native';
import MapView from 'react-native-maps';
import { Marker } from 'react-native-maps';
import * as Location from 'expo-location';
import * as Permissions from 'expo-permissions';
export default class App extends React.Component {
state = {
latitude: 0,
longitude: 0,
errorMessage: null
};
componentWillMount() {
if (Platform.OS === 'android' && !Constants.isDevice) {
this.setState({
errorMessage:
'Oops, this will not work on Sketch in an Android emulator. Try it on your device!'
});
} else {
this._getLocationAsync();
}
}
_getLocationAsync = async () => {
var { status } = await Permissions.askAsync(Permissions.LOCATION);
if (status !== 'granted') {
this.setState({
errorMessage: 'Permission to access location was denied'
});
}
Location.watchPositionAsync({ distanceInterval: 5 }, location => {
this.setState({
latitude: location.coords.latitude,
longitude: location.coords.longitude
});
});
};
render() {
console.log(this.state.latitude, this.state.longitude);
return (
<MapView
style={{ flex: 1 }}
initialRegion={{
latitude: 37.78825,
longitude: -122.4324,
latitudeDelta: 0.0922,
longitudeDelta: 0.0421
}}
>
<Marker coordinate={{ longitude: 2.294481, latitude: 48.85837 }} />
<Marker
coordinate={{
longitude: this.state.longitude,
latitude: this.state.latitude
}}
/>
</MapView>
);
}
}
| 6c38fbe0ad9f434fb2dee3ce63283e6725c1f13f | [
"JavaScript"
] | 1 | JavaScript | cdoussine/rn-finding-nemo | 242b204c6b837a31a3498acfcacc35528823a133 | 0b46ef5ee0e59b1f541a88217f7ad84aff7767a9 | |
refs/heads/master | <file_sep>import { ClientEvents } from "discord.js";
export type DiscordEvent = keyof ClientEvents;
<file_sep>import {
MetadataStorage,
DiscordEvent
} from "..";
export function Once(event: DiscordEvent);
export function Once(event: string);
export function Once(event: DiscordEvent) {
return (target: Object, key: string, descriptor: PropertyDescriptor): void => {
MetadataStorage.Instance.AddOn({
class: target.constructor,
key,
params: {
from: target.constructor,
guards: [],
event,
once: true,
method: descriptor.value,
originalParams: {}
}
});
};
}
<file_sep>import { PrefixType } from "..";
export interface IDiscordParams {
prefix?: PrefixType;
commandCaseSensitive?: boolean;
importCommands?: (string | Function)[];
}
<file_sep>import { Client, CommandMessage } from "..";
export interface ClassCommand {
execute(command: CommandMessage, client: Client): Promise<any> | any;
}
<file_sep>export * from "./IOn";
export * from "./IDecorator";
export * from "./DiscordEvent";
export * from "./IAppConfig";
export * from "./LoadClass";
export * from "./IGuard";
export * from "./ICommandParams";
export * from "./CommandMessage";
export * from "./ICommandInfos";
export * from "./ICommandNotFoundParams";
export * from "./IDiscordParams";
export * from "./GuardFunction";
export * from "./IInstance";
export * from "./PrefixType";
export * from "./ClassCommand";
<file_sep><p align="center">
<br/>
<img src="https://i.imgur.com/afS1H2x.png" width="150px">
<br/>
<br/>
<h1 align="center">
<p align="center">
discord.ts (@typeit/discord)
</p>
</h1>
<p align="center">
Create your discord bot by using TypeScript and decorators!
</p>
<br/>
</p>
## Introduction
This module is built on `discord.js`, so the internal behavior (methods, properties, ...) is the same.
## ☎️ Need help ?
**[Simply join the Discord server](https://discord.gg/VDjwu8E)**
You can also find help with the [different projects that use discord.ts](https://github.com/OwenCalvin/discord.ts/network/dependents?package_id=UGFja2FnZS00Njc1MzYwNzU%3D) and in the [examples folder](https://github.com/OwenCalvin/discord.ts/tree/master/examples)
## 💾 Installation
Use [`npm`](https://www.npmjs.com/package/@typeit/discord) or `yarn` to install `@typeit/discord` along with its peer dependency (`discord.js`).
You must also install `reflect-metadata` for the decorators and import it at your entry point:
```sh
npm i @typeit/discord discord.js reflect-metadata
```
```typescript
import "reflect-metadata";
// start ...
```
Your tsconfig.json should look like this:
```json
{
"compilerOptions": {
"module": "commonjs",
"target": "es2017",
"noImplicitAny": false,
"sourceMap": true,
"outDir": "build",
"emitDecoratorMetadata": true,
"experimentalDecorators": true,
"declaration": true,
"importHelpers": true,
"forceConsistentCasingInFileNames": true,
"lib": [
"es2017",
"esnext.asynciterable"
],
"moduleResolution": "node"
},
"exclude": [
"node_modules"
]
}
```
## 🚀 Getting started
We start with an empty class (`abstract` is not required, but it is more type-safe since the class shouldn't be instantiated).
```typescript
abstract class AppDiscord {
}
```
Then you must declare it as a Discord app class with the `@Discord` decorator:
```typescript
import { Discord } from "@typeit/discord";
@Discord() // Decorate the class
abstract class AppDiscord {
}
```
We can now declare methods that will be executed whenever a Discord event is triggered.
Our methods must be decorated with the `@On(event: string)` or `@Once(event: string)` decorator.
When the event is triggered, the method is called and we receive the values (in arguments) related to the event.
Here, we receive the message instance (details below):
```typescript
import { Discord, On } from "@typeit/discord";
@Discord()
abstract class AppDiscord {
@On("message")
private onMessage(message: Message) {
// ...
}
}
```
## Start your application
In order to start your application, you must use the DiscordTS `Client` (not the client that is provided by discord.js!).
It works the same way as discord.js's Client (same methods, properties, ...), but the `login` method is overridden and you can set the `silent` property (in the `Client` initialization) so that nothing is logged to the console.
```typescript
// Use the Client that is provided by @typeit/discord, NOT discord.js
import { Client } from "@typeit/discord";
function start() {
const client = new Client();
client.login(
"YOUR_TOKEN",
`${__dirname}/*Discord.ts` // glob string to load the classes
);
}
start();
```
## Client payload injection
You will also receive the client instance always as the last payload:
```typescript
import {
Discord,
On,
Client
} from "@typeit/discord";
import { Message } from "discord.js";
@Discord()
abstract class AppDiscord {
@On("message")
private onMessage(
message: Message,
client: Client // Client instance injected here
) {
// ...
}
}
```
## 📟 Commands
You can simply use the `@Command` and `@CommandNotFound` decorators to implement a command system in your app.
When you use the `@Command` or the `@CommandNotFound` decorator, you should type your first parameter as a `CommandMessage`. It provides the command parameters, the prefix, and the command (specified [here](https://github.com/OwenCalvin/discord.ts/blob/master/src/Types/CommandMessage.ts)).
```typescript
import {
Discord,
On,
Client,
Command,
CommandMessage
} from "@typeit/discord";
@Discord({ prefix: "!" })
abstract class AppDiscord {
@Command("hello")
private hello(
message: CommandMessage,
client: Client
) {
// ...
}
@CommandNotFound()
private notFound(
message: CommandMessage,
client: Client
) {
// ...
}
}
```
### The command parameters
You can specify the `prefix` and the `commandCaseSensitive` options in the `@Discord` and `@Command` params (for `@CommandNotFound` you can only specify the prefix). The params on `@Command` override those of `@Discord`.
The `@Command` decorator also has a `description` parameter and an `infos` one. The description is useful if you have a help command that displays command descriptions. The `infos` parameter can store anything you want.
**If you use different prefixes or case sensitivity, I recommend implementing multiple classes decorated with the `@Discord` decorator using different prefixes/case sensitivity, like the [multipleDiscordInstances example](https://github.com/OwenCalvin/discord.ts/tree/master/examples/multipleDiscordInstances).**
Here is an example of the different params:
```typescript
import {
Discord,
On,
Client,
Command,
CommandMessage
} from "@typeit/discord";
@Discord({ prefix: "!", commandCaseSensitive: true })
abstract class AppDiscord {
// Executed using the !HELLO command
@Command("HELLO")
private hello(message: CommandMessage) {
// ...
}
// Executed if a command with the prefix "!" isn't found
@CommandNotFound()
private notFound(message: CommandMessage) {
// ...
}
// Executed using the .helloDot command
@Command("helloDot", { prefix: "." })
private helloDot(message: CommandMessage) {
// ...
}
// Executed if a command with the prefix "." isn't found
@CommandNotFound({ prefix: "." })
private notFoundDot(message: CommandMessage) {
// ...
}
// Executed using 0ab, 0Ab, 0aB or 0AB
@Command("ab", { prefix: "0" })
private ab(message: CommandMessage) {
// ...
}
}
```
### Dynamic prefix
If you have different prefixes for different servers, you can use dynamic prefixes with functions like this:
```typescript
// If the message was sent in the guild named MyGuildName, the prefix "." is used; otherwise the prefix "$" triggers the action.
async function prefixBehaviour(message: Message, client: Client) {
if (message.guild.name === "MyGuildName") {
return ".";
}
return "$";
}
@Discord({ prefix: prefixBehaviour })
abstract class AppDiscord {
@Command("HELLO")
private hello(message: CommandMessage) {
// ...
}
}
```
### Retrieve the commands
You can simply get all the commands and their details using `Client.getCommands<InfoType>(forPrefix?: string): ICommandInfos[]`.
> If you specify no prefix for the `forPrefix` parameter, you will receive the details of all the commands.
```typescript
import {
Discord,
On,
Client,
Command,
CommandMessage
} from "@typeit/discord";
interface Infos {
requiredRole: string;
}
@Discord({ prefix: "!" })
abstract class AppDiscord {
@Command("hello", {
description: "The command simply say hello",
infos: { requiredRole: "master" }
})
private hello(
message: CommandMessage,
client: Client
) {
// ...
}
@Command("help")
private hello(
message: CommandMessage,
client: Client
) {
// You receive all the commands prefixed by "!"
const commands = Client.getCommands<Infos>("!");
// ...
}
@CommandNotFound()
private notFound(
message: CommandMessage,
client: Client
) {
// ...
}
}
```
### Command directory pattern
> [Example](https://github.com/OwenCalvin/discord.ts/tree/master/examples/commandsDir)
If you have a directory pattern that looks like this:
```shell
Main.ts
DiscordApp.ts
commands
- Ping.ts
- Hello.ts
- Blabla.ts
```
You should use the `importCommands` parameter for the `@Discord` decorator.
Here, all of the commands will be injected into this Discord class instance.
```typescript
import {
Discord,
CommandNotFound
} from "@typeit/discord";
@Discord({
prefix: "!", // The Discord parameters will be applied to the imported commands
importCommands: [
Path.join(__dirname, "commands", "*.ts")
// You can also specify the class directly here if you don't want to use a glob
]
})
export abstract class DiscordApp {
@CommandNotFound({ prefix: "!" })
  notFoundA(command: CommandMessage) {
    command.reply("Command not found");
}
}
```
Here is an example of what your command file should look like:
```typescript
import { ClassCommand, Command, CommandMessage } from "@typeit/discord";
export abstract class Bye implements ClassCommand {
@Command("bye")
async execute(command: CommandMessage) {
command.reply("Bye!");
}
}
```
### Set command parameters programmatically
If you need to change the prefix at runtime, or if it's loaded from a file when your app starts, you can use two methods (they return `true` if the params changed):
- `Client.setDiscordParams(discordInstance: InstanceType<any>, params: IDiscordParams): boolean`
- `Client.setCommandParams(discordInstance: InstanceType<any>, method: Function, params: ICommandParams): boolean`
> I recommend not specifying the prefix inside the decorator if you use one of these two methods, because it wouldn't be consistent.
```typescript
import {
Discord,
On,
Client,
Command,
CommandMessage
} from "@typeit/discord";
@Discord({ prefix: "!" })
abstract class AppDiscord {
@Command("prefix")
private prefix(message: CommandMessage) {
// Will change the prefix of all the @Command methods of this @Discord instance
Client.setDiscordParams(this, {
prefix: message.params[0]
});
}
@Command("changeMyPrefix")
private changeMyPrefix(message: CommandMessage) {
// Will change the prefix of the changeMyPrefix method
Client.setCommandParams(this, this.changeMyPrefix, {
prefix: message.params[0]
});
}
}
```
The command class should look like this:
```typescript
import {
ClassCommand,
Command,
CommandMessage
} from "@typeit/discord";
export abstract class Bye implements ClassCommand {
@Command()
async execute(command: CommandMessage) {
command.reply("Bye!");
}
}
```
## ⚔️ Guards
> Guards also work with `@Command` and `@CommandNotFound`
You can use functions that are executed before your event handler to determine whether it should run. For example, if you want to apply a prefix to the messages, you can simply use the `@Guard` decorator:
(The `Prefix` function is provided by the `@typeit/discord` package, from which you can import it.)
```typescript
import {
Discord,
On,
Client,
Guard,
Prefix
} from "@typeit/discord";
import {
Message
} from "discord.js";
import {
NotBot
} from "./NotBot";
@Discord()
abstract class AppDiscord {
@On("message")
@Guard(
    NotBot, // You can use multiple guard functions; they are executed in this order!
Prefix("!")
)
async onMessage(message: Message) {
switch (message.content.toLowerCase()) {
case "hello":
message.reply("Hello!");
break;
default:
message.reply("Command not found");
break;
}
}
}
```
### The guard functions
Here is a simple example of a guard function (the payload and the client instance are injected like for events)
- If the function returns `false`: the next guards and the event function aren't executed
- If the function returns `true`: execution continues with the next guards
```typescript
import { Client } from "typeit/discord";
import { Message } from "discord.js";
export function NotBot(message: Message, client: Client) {
return client.user.id !== message.author.id;
}
```
If you have to indicate parameters for a guard function (like for the `Prefix` guard) you can simply use the "function that returns a function" pattern like this:
```typescript
import { Client } from "typeit/discord";
import { Message } from "discord.js";
export function Prefix(text: string, replace: boolean = true) {
return (message: Message, client: Client) => {
const startWith = message.content.startsWith(text);
if (replace) {
message.content = message.content.replace(text, "");
}
return startWith;
};
}
```
## 💡 Events and payload
Here are the details of the payloads that are injected into the method for each specific event.
Be aware that the types must be imported from discord.js (except for `Client`).
In this example of the event `"channelUpdate"`, we receive two payloads from the event:
> it works for `@Once(event: DiscordEvent)` too
```typescript
@Discord()
abstract class AppDiscord {
@On("channelUpdate")
private onChannelUpdate(
oldChannel: Channel, // first one
newChannel: Channel // second one
) {
// ...
}
}
```
Here are all the `DiscordEvent` values and their parameters (`discord.js` version 12.2.0)
> Example for the first one:
> `@On("`**channelCreate**`")`
> `onChannelCreate(`**channel: Channel**`) { }`
- **channelCreate**: `(Channel)`
- **channelDelete**: `(Channel | PartialDMChannel)`
- **channelPinsUpdate**: `(Channel | PartialDMChannel, Date)`
- **channelUpdate**: `(Channel, Channel)`
- **debug**: `(string)`
- **warn**: `(string)`
- **disconnect**: `(any, number)`
- **emojiCreate**: `(GuildEmoji)`
- **emojiDelete**: `(GuildEmoji)`
- **emojiUpdate**: `(GuildEmoji, GuildEmoji)`
- **error**: `(Error)`
- **guildBanAdd**: `(Guild, User | PartialUser)`
- **guildBanRemove**: `(Guild, User | PartialUser)`
- **guildCreate**: `(Guild)`
- **guildDelete**: `(Guild)`
- **guildUnavailable**: `(Guild)`
- **guildIntegrationsUpdate**: `(Guild)`
- **guildMemberAdd**: `(GuildMember | PartialGuildMember)`
- **guildMemberAvailable**: `(GuildMember | PartialGuildMember)`
- **guildMemberRemove**: `(GuildMember | PartialGuildMember)`
- **guildMembersChunk**: `(Collection<Snowflake, GuildMember | PartialGuildMember>, Guild)`
- **guildMemberSpeaking**: `(GuildMember | PartialGuildMember, Readonly<Speaking>)`
- **guildMemberUpdate**: `(GuildMember | PartialGuildMember, GuildMember | PartialGuildMember)`
- **guildUpdate**: `(Guild, Guild)`
- **inviteCreate**: `(Invite)`
- **inviteDelete**: `(Invite)`
- **message**: `(Message)`
- **messageDelete**: `(Message | PartialMessage)`
- **messageReactionRemoveAll**: `(Message | PartialMessage)`
- **messageReactionRemoveEmoji**: `(MessageReaction)`
- **messageDeleteBulk**: `(Collection<Snowflake, Message | PartialMessage>)`
- **messageReactionAdd**: `(MessageReaction, User | PartialUser)`
- **messageReactionRemove**: `(MessageReaction, User | PartialUser)`
- **messageUpdate**: `(Message | PartialMessage, Message | PartialMessage)`
- **presenceUpdate**: `(Presence | undefined, Presence)`
- **rateLimit**: `(RateLimitData)`
- **ready**: `()`
- **invalidated**: `()`
- **roleCreate**: `(Role)`
- **roleDelete**: `(Role)`
- **roleUpdate**: `(Role, Role)`
- **typingStart**: `(Channel | PartialDMChannel, User | PartialUser)`
- **userUpdate**: `(User | PartialUser, User | PartialUser)`
- **voiceStateUpdate**: `(VoiceState, VoiceState)`
- **webhookUpdate**: `(TextChannel)`
- **shardDisconnect**: `(CloseEvent, number)`
- **shardError**: `(Error, number)`
- **shardReady**: `(number)`
- **shardReconnecting**: `(number)`
- **shardResume**: `(number, number)`
## Examples
Examples are provided in the [`/examples` folder](https://github.com/OwenCalvin/DiscordTS/tree/master/examples)!
## Migration v1 to v2
You should just add parentheses after the `@Discord` decorator, everywhere in your app.
`@Discord class X` should now be `@Discord() class X`.
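A minimal before/after sketch of the change:
```typescript
// v1
@Discord
abstract class AppDiscord {}

// v2
@Discord()
abstract class AppDiscord {}
```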
## See also
- [discord.js](https://discord.js.org/#/)
<file_sep>import { PrefixType } from ".";
export interface ICommandNotFoundParams {
prefix?: PrefixType;
}
<file_sep>import { Client } from "..";
import { Message } from "discord.js";
export type PrefixType = string | ((params: Message, client: Client) => Promise<string> | string);
<file_sep>import { PrefixType } from ".";
export interface ICommandInfos<InfoType = any> {
prefix: PrefixType;
commandName: string;
description: string;
infos: InfoType;
}
<file_sep>import { IDiscordParams, PrefixType } from "..";
export interface IInstance extends IDiscordParams {
instance?: Function;
prefix: PrefixType;
}
<file_sep>import {
MetadataStorage,
ICommandNotFoundParams
} from "..";
export function CommandNotFound();
export function CommandNotFound(params: ICommandNotFoundParams);
export function CommandNotFound(params?: ICommandNotFoundParams) {
const definedParams = params || {};
return (target: Object, key: string, descriptor: PropertyDescriptor): void => {
MetadataStorage.Instance.AddOn({
class: target.constructor,
key,
params: {
from: target.constructor,
commandName: "",
prefix: definedParams.prefix,
guards: [],
event: "message",
once: false,
method: descriptor.value,
originalParams: definedParams
}
});
};
}
<file_sep>import {
MetadataStorage,
ICommandParams
} from "..";
export function Command();
export function Command(commandName: string);
export function Command(params: ICommandParams);
export function Command(commandName: string, params: ICommandParams);
export function Command(commandNameOrParams?: string | ICommandParams, params?: ICommandParams) {
const isCommandName = typeof commandNameOrParams === "string";
let definedParams = params || {};
let definedCommandName: string;
if (!isCommandName) {
if (commandNameOrParams) {
definedParams = commandNameOrParams as ICommandParams;
} else {
definedParams = {};
}
} else {
definedCommandName = commandNameOrParams as string;
}
return (target: Object, key: string, descriptor: PropertyDescriptor): void => {
const method = descriptor.value;
MetadataStorage.Instance.AddOn({
class: target.constructor,
key,
params: {
from: target.constructor,
commandName: definedCommandName || key,
guards: [],
event: "message",
once: false,
method,
...definedParams,
commandCaseSensitive: definedParams.commandCaseSensitive || false,
prefix: definedParams.prefix,
originalParams: {
...definedParams,
commandName: definedCommandName
}
}
});
};
}
<file_sep>import { GuardFunction } from ".";
export interface IGuard {
fn: GuardFunction;
method: Function;
}
<file_sep>import { ICommandNotFoundParams } from "./ICommandNotFoundParams";
export interface ICommandParams extends ICommandNotFoundParams {
commandCaseSensitive?: boolean;
description?: string;
infos?: any;
}
<file_sep>import { CommandNotFound, Discord, CommandMessage } from "../../../src";
import * as Path from "path";
import { Bye } from "../commands/Bye";
@Discord({
prefix: "!",
importCommands: [
Path.join(__dirname, "..", "commands", "*.ts"),
Bye
]
})
export class DiscordApp {
@CommandNotFound({ prefix: "!" })
  notFoundA(command: CommandMessage) {
    command.reply("Command not found");
}
}
<file_sep>export interface IAppConfig {
classes?: Function[];
}
<file_sep>import { ClassCommand, Command, CommandMessage } from "../../../src";
export abstract class Hello implements ClassCommand {
@Command("hello")
async execute(command: CommandMessage) {
command.reply("Hello!");
}
}
<file_sep>import {
MetadataStorage,
GuardFunction
} from "..";
export function Guard(...fns: GuardFunction[]) {
return (target: Object, key: string, descriptor: PropertyDescriptor): void => {
fns.reverse().map((fn) => {
MetadataStorage.Instance.AddGuard({
class: target.constructor,
key,
params: {
fn,
method: descriptor.value
}
});
});
};
}
<file_sep>import {
MetadataStorage,
IDiscordParams
} from "..";
import * as Glob from "glob";
function importCommand(classType: Function, target: Object) {
const ons = MetadataStorage.Instance.Ons.filter((on) => {
return on.class === classType;
});
ons.map((on) => {
on.class = target;
on.params.commandName = on.params.originalParams.commandName || classType.name;
on.params.from = classType;
});
}
export function Discord();
export function Discord(params: IDiscordParams);
export function Discord(params?: IDiscordParams) {
const definedParams = params || {};
return (target: Object) => {
if (definedParams.importCommands) {
definedParams.importCommands.map((cmd) => {
if (typeof cmd === "string") {
const files = Glob.sync(cmd);
files.map((file) => {
let classType;
const classImport = require(file);
if (classImport.default) {
classType = classImport.default;
} else {
const key = Object.keys(classImport)[0];
classType = classImport[key];
}
importCommand(classType, target);
});
} else {
importCommand((cmd as any).execute, target);
}
});
}
MetadataStorage.Instance.AddInstance({
class: target,
key: target.constructor.name,
params: {
prefix: definedParams.prefix || "",
commandCaseSensitive: definedParams.commandCaseSensitive || false
}
});
};
}
<file_sep>import {
IOn,
IDecorator,
IInstance,
IGuard,
Client,
Prefix,
CommandMessage,
IDiscordParams,
ICommandParams,
ICommandInfos
} from "..";
export class MetadataStorage {
private static _instance: MetadataStorage;
private _ons: IDecorator<IOn>[] = [];
private _guards: IDecorator<IGuard>[] = [];
private _instances: IDecorator<IInstance>[] = [];
static get Instance() {
if (!this._instance) {
this._instance = new MetadataStorage();
}
return this._instance;
}
get Ons() {
return this.getReadonlyArray(this._ons);
}
AddOn(on: IDecorator<IOn>) {
this._ons.push(on);
}
AddGuard(guard: IDecorator<IGuard>) {
this._guards.push(guard);
}
AddInstance(classType: IDecorator<IInstance>) {
this._instances.push({
...classType,
params: {
instance: new classType.class(),
...classType.params
}
});
}
Build(client: Client) {
const commands = this._ons.reduce<string[]>((prev, on) => {
if (on.params.commandName) {
prev.push(on.params.commandName);
}
return prev;
}, []);
this._ons.map((on) => {
on.params.guards = this._guards.reverse().filter((guard) => {
return (
guard.class === on.params.from &&
guard.params.method === on.params.method
);
}, []);
on.params.guardFn = async (client: Client, ...params: any) => {
let res = true;
for (const fn of on.params.guards) {
if (res) {
res = await fn.params.fn(...params, client);
} else {
break;
}
}
return res;
};
const instance = this._instances.find((instance) => instance.class === on.class);
if (instance) {
on.params.linkedInstance = instance;
}
on.params.compiledMethod = async (...params: any[]) => {
let command: IDecorator<IOn> = on;
let execute = true;
if (on.params.event === "message") {
if (on.params.commandName !== undefined) {
execute = false;
const message = params[0] as CommandMessage;
let prefix = on.params.prefix || on.params.linkedInstance.params.prefix;
if (typeof prefix === "function") {
prefix = await prefix(message, client);
}
if (Prefix(prefix)(message)) {
if (message.author.id !== client.user.id) {
const params = message.content.split(" ");
let testedCommand = params[0].replace(prefix, "");
let commandName = on.params.commandName;
let allCommands = commands;
const lowerCommands = allCommands.map((command) => command.toLowerCase());
const notFoundFn = this._ons.find((cmd) => {
return (
(cmd.params.prefix || cmd.params.linkedInstance.params.prefix) === prefix &&
cmd.params.commandName === ""
);
});
if (testedCommand.toLowerCase() === commandName.toLowerCase()) {
const originalCommand = testedCommand;
message.prefix = prefix;
message.command = testedCommand;
message.commandWithPrefix = prefix + testedCommand;
message.originalCommand = originalCommand;
message.originalCommandWithPrefix = prefix + originalCommand;
message.params = params;
message.params.splice(0, 1);
if (
!on.params.linkedInstance.params.commandCaseSensitive &&
!on.params.commandCaseSensitive &&
on.params.commandCaseSensitive !== undefined
) {
testedCommand = testedCommand.toLowerCase();
commandName = commandName.toLowerCase();
allCommands = lowerCommands;
}
if (allCommands.indexOf(testedCommand) === -1) {
if (notFoundFn) {
command = notFoundFn;
execute = true;
}
}
} else {
if (lowerCommands.indexOf(testedCommand.toLowerCase()) === -1) {
testedCommand = "";
}
}
if (testedCommand === commandName) {
execute = true;
command = on;
}
}
}
}
}
if (execute) {
const executeMain = await command.params.guardFn(client, ...params);
if (executeMain) {
return await this.executeBindedOn(command, params, client);
}
}
};
});
}
setDiscordParams(discordInstance: InstanceType<any>, params: IDiscordParams): boolean {
const discord = this._instances.find((instance) => instance.params.instance === discordInstance);
if (discord) {
discord.params = {
...discord.params,
...params
};
return true;
}
return false;
}
getPrefix(command: IOn) {
if (command.prefix || command.linkedInstance) {
return command.prefix || command.linkedInstance.params.prefix;
}
}
getCommands<InfoType = any>(forPrefix?: string) {
return this.getCommandsIntrospection(forPrefix).map<ICommandInfos<InfoType>>((command) => {
const prefix = this.getPrefix(command);
if (prefix) {
return {
prefix,
commandName: command.commandName,
description: command.description,
infos: command.infos
};
}
});
}
getCommandsIntrospection(forPrefix?: string) {
return this._ons.reduce<IOn[]>((prev, on) => {
if (on.params.commandName) {
if (forPrefix) {
const prefix = on.params.prefix || on.params.linkedInstance.params.prefix;
if (forPrefix === prefix) {
prev.push(on.params);
}
} else {
prev.push(on.params);
}
}
return prev;
}, []);
}
setCommandParams(discordInstance: InstanceType<any>, instanceMethod: Function, params: ICommandParams): boolean {
const on = this._ons.find((on) => {
let cond = on.class === discordInstance;
if (on.params.linkedInstance) {
cond = on.params.linkedInstance.params.instance === discordInstance;
}
return (
cond &&
on.params.method === instanceMethod
);
});
if (on) {
on.params = {
...on.params,
...params
};
return true;
}
return false;
}
compileOnForEvent(
event: string,
client: Client,
once: boolean = false
) {
const ons = this._ons.filter(on => (
on.params.event === event &&
on.params.once === once
));
return async (...params: any[]) => {
for (const on of ons) {
await on.params.compiledMethod(...params, client);
}
};
}
private executeBindedOn(on: IDecorator<IOn>, params: any[], client: Client) {
if (on.params.linkedInstance && on.params.linkedInstance.params.instance) {
return on.params.method.bind(on.params.linkedInstance.params.instance)(...params, client);
} else {
return on.params.method(...params, client);
}
}
private getReadonlyArray<Type>(array: Type[]) {
return array as ReadonlyArray<Type>;
}
}
<file_sep>import { ClientOptions } from "discord.js";
export interface IClientOptions extends ClientOptions {
silent?: boolean;
}
<file_sep>export type GuardFunction =
((...params: any[]) => Promise<boolean> | boolean | Promise<undefined> | undefined);
<file_sep>export interface IDecorator<Type> {
class: any;
key: string;
params: Type;
}
<file_sep>import { Client } from "../../../src";
import { Message } from "discord.js";
export function NotBot(message: Message, client: Client) {
return client.user.id !== message.author.id;
}
<file_sep>export * from "./Prefix";
<file_sep>export * from "./On";
export * from "./Once";
export * from "./Guard";
export * from "./Discord";
export * from "./Command";
export * from "./CommandNotFound";
<file_sep>import { ClassCommand, Command, CommandMessage } from "../../../src";
export abstract class Bye implements ClassCommand {
@Command("bye")
async execute(command: CommandMessage) {
command.reply("Bye!");
}
}
<file_sep>import { Client as ClientJS } from "discord.js";
import * as Glob from "glob";
import {
MetadataStorage,
LoadClass,
IDiscordParams,
ICommandParams
} from ".";
import { IClientOptions } from "./Types/ClientOptions";
export class Client extends ClientJS {
private _silent: boolean;
private _loadClasses: LoadClass[] = [];
private _loadedOnEvents: string[] = [];
private _loadedOnceEvents: string[] = [];
get silent() {
return this._silent;
}
set silent(value: boolean) {
this._silent = value;
}
constructor(options?: IClientOptions) {
super(options);
if (options) {
this.silent = options.silent;
}
}
static setDiscordParams(discordInstance: InstanceType<any>, params: IDiscordParams): boolean {
return MetadataStorage.Instance.setDiscordParams(discordInstance, params);
}
static setCommandParams(discordInstance: InstanceType<any>, instanceMethod: Function, params: ICommandParams): boolean {
return MetadataStorage.Instance.setCommandParams(discordInstance, instanceMethod, params);
}
static getCommandsIntrospection(forPrefix?: string) {
return MetadataStorage.Instance.getCommandsIntrospection(forPrefix);
}
static getCommands<InfoType = any>(forPrefix?: string) {
return MetadataStorage.Instance.getCommands<InfoType>(forPrefix);
}
/**
* Start your bot
* @param token The bot token
* @param loadClasses A list of glob path or classes
*/
login(token: string, ...loadClasses: LoadClass[]) {
this._loadClasses = loadClasses;
this.loadClasses();
MetadataStorage.Instance.Build(this);
MetadataStorage.Instance.Ons.map(async (on) => {
if (
on.params.once &&
this._loadedOnceEvents.indexOf(on.params.event) === -1
) {
        this.once(
          on.params.event,
MetadataStorage.Instance.compileOnForEvent(
on.params.event,
this,
true
)
);
this._loadedOnceEvents.push(on.params.event);
} else if (this._loadedOnEvents.indexOf(on.params.event) === -1) {
this.on(
on.params.event,
MetadataStorage.Instance.compileOnForEvent(
on.params.event,
this
)
);
this._loadedOnEvents.push(on.params.event);
}
if (!this.silent) {
let eventName = on.params.event;
if (on.params.commandName !== undefined) {
const prefix = MetadataStorage.Instance.getPrefix(on.params);
if (prefix) {
let commandName = on.params.commandName;
if (!on.params.commandCaseSensitive && !on.params.linkedInstance.params.commandCaseSensitive) {
commandName = commandName.toLowerCase();
}
if (on.params.commandName === "") {
eventName += ` (Command not found "${prefix}")`;
} else {
eventName += ` (Command "${prefix}${on.params.commandName}")`;
}
}
}
console.log(`${eventName}: ${on.params.from.name}.${on.key}`);
}
});
return super.login(token);
}
private loadClasses() {
if (this._loadClasses) {
this._loadClasses.map((file) => {
if (typeof file === "string") {
const files = Glob.sync(file);
files.map((file) => {
require(file);
});
}
});
}
}
}
<file_sep>import { ClassCommand, Command, CommandMessage, Guard } from "../../../src";
import { Say } from "../guards/Say";
export default abstract class Ping implements ClassCommand {
@Guard(Say("Pong"))
@Command({ description: "Ping pong", commandCaseSensitive: true })
async execute(command: CommandMessage) {
command.reply("Pong");
}
}
<file_sep>import {
DiscordEvent,
IInstance,
IDecorator,
IGuard,
ICommandParams
} from ".";
export interface IOn extends ICommandParams {
commandName?: string;
event: DiscordEvent;
method: (...params: any[]) => void;
compiledMethod?: (...params: any[]) => void;
linkedInstance?: IDecorator<IInstance>;
once: boolean;
guards: IDecorator<IGuard>[];
guardFn?: (...params: any[]) => Promise<any>;
from: Function;
originalParams: Partial<IOn>;
}
<file_sep>import { Message } from "discord.js";
export function Prefix(text: string, replace: boolean = false) {
return (message: Message) => {
const startWith = message.content.startsWith(text);
if (replace) {
message.content = message.content.replace(text, "");
}
return startWith;
};
}
<file_sep>import {
Discord,
On,
Client, // Use the Client that is provided by @typeit/discord NOT discord.js
Guard,
Prefix,
Command,
CommandNotFound,
CommandMessage
} from "../../src";
// You must import the types from discord.js
import {
Message
} from "discord.js";
import { NotBot } from "./guards/NotBot";
import { Say } from "./guards/Say";
enum Answers {
hello = "Hello",
notFound = "Command not found...",
prefix = "Prefix changed"
}
// Decorate the class with the @Discord() decorator
// You can specify the prefix for the @Command() decorator
@Discord({ prefix: "!" })
export class AppDiscord {
private static _client: Client;
static start() {
this._client = new Client();
// In the login method, you must specify the glob string to load your classes (for the framework).
// In this case that's not necessary because the entry point of your application is this file.
this._client.login(
"YOUR_TOKEN",
`${__dirname}/*Discord.ts` // glob string to load the classes
);
console.log(Client.getCommands());
}
// When the "message" event is triggered, this method is called with a specific payload (related to the event)
@On("message")
@Guard(
NotBot,
Prefix(".", true)
)
async onMessage(message: Message) {
switch (message.content) {
case "hello":
message.reply(Answers.hello);
break;
case "hello2":
message.reply(Answers.hello + "2");
break;
default:
message.reply(Answers.notFound);
break;
}
}
// The onMessage method but with @Command() decorator
@Guard(Say("hello"))
@Command("hello", {
description: "asdkjad",
commandCaseSensitive: true,
infos: { infoA: "my info" }
})
hello(command: CommandMessage) {
command.reply(Answers.hello);
}
@Guard(Say("set prefix"))
@Command("setPrefix", { commandCaseSensitive: true })
changePrefix(command: CommandMessage) {
Client.setDiscordParams(this, {
prefix: command.params[0]
});
command.reply(Answers.prefix);
}
@Guard(Say("command not found"))
@CommandNotFound()
notFound(command: CommandMessage) {
command.reply(Answers.notFound + ".");
}
@Guard(Say("hello comma"))
@Command("heLLo", {
prefix: async (message: Message, client: Client) => {
if (message.guild.name === "dev") {
return ",";
}
return "-";
}
})
helloComma(command: CommandMessage) {
command.reply(Answers.hello + " comma");
}
@Guard(Say("command not found comma"))
@CommandNotFound({ prefix: "," })
notFoundComma(command: CommandMessage) {
command.reply(Answers.notFound + " comma");
}
@Guard(Say("change me"))
@Command("changeMe", { prefix: "$", commandCaseSensitive: true })
changeMyPrefix(command: CommandMessage) {
Client.setCommandParams(this, this.changeMyPrefix, {
prefix: command.params[0]
});
command.reply(Answers.prefix);
}
}
// Start your app
AppDiscord.start();
<file_sep>import { Message } from "discord.js";
import { PrefixType } from ".";
export interface CommandMessage extends Message {
params: string[];
command: string;
commandWithPrefix: string;
originalCommand: string;
originalCommandWithPrefix: string;
prefix: PrefixType;
}
| 4c54290734211f6798b5963b51c5a7fb7cc219a7 | [
"Markdown",
"TypeScript"
] | 33 | TypeScript | phen0menon/discord.ts | cbff07cae1e794851e55d07bb6cb7dca2bfac593 | 36596b8b2a8cc80b11297494c677e7366547a528 | |
refs/heads/master | <file_sep>package andar.tower.defense.model;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Vector;
import andar.tower.defense.GameContext;
import andar.tower.defense.parser.Group;
import andar.tower.defense.parser.Material;
public class Model implements Serializable {
// position/rotation/scale
private static final String tag = "Model";
public String name;
private ParsedObjModel parsedObjModel; // the meshes and textures
public Model3D model3D;
public float xrot = 90;
public float yrot = 90;
public float zrot = 0;
/* in normal view distance you can see a model
* on screen in a range of x/y: (-30..+30)/(-30..+30) */
public float xpos = 0;
public float ypos = 0;
public float zpos = 0;
/* rotate/scale a model to "normal" at start */
private float defaultXRot;
private float defaultYRot;
private float defaultScale;
public float scale = 4f;
public static final int STATE_DYNAMIC = 0;
public static final int STATE_FINALIZED = 1;
private boolean hidden = false;
// timestamp of last position-update in milliseconds
private double lastPosUpdate = 0;
private Vector<Group> groups = new Vector<Group>();
/**
* all materials
*/
protected HashMap<String, Material> materials = new HashMap<String, Material>();
protected GameContext gameContext;
public Model(GameContext gameContext, ParsedObjModel parsedObjModel, String patternName) {
this.gameContext = gameContext;
this.name = parsedObjModel.name;
setParsedObjModel(parsedObjModel);
Model3D model3d = new Model3D(this, parsedObjModel, patternName);
this.model3D = model3d;
adjustModel(0f,0f,0f);
}
/**
* calculate new position on path depending on velocity
*/
public void positionUpdate() {
}
/**
* Rotate/scale a model to "normal" at start.
* @param defaultXRot
* @param defaultYRot
* @param defaultScale
*/
public void adjustModel(float defaultXRot, float defaultYRot, float defaultScale) {
this.defaultXRot = defaultXRot;
this.defaultYRot = defaultYRot;
this.defaultScale = defaultScale;
}
public void setScale(float f) {
this.scale = defaultScale * f;
if (this.scale < 0.0001f)
this.scale = 0.0001f;
}
public float getScale() {
return scale;
}
public void setXrot(float dY) {
this.xrot = defaultXRot + dY;
}
public void setYrot(float dX) {
this.yrot = defaultYRot + dX;
}
public void addXpos(float f) {
this.xpos += f;
}
public void addYpos(float f) {
this.ypos += f;
}
/**
* get hit/shot: a model that aims at this one reaches its destination
* @param hitpoints how hard it got hit
* @param gameContext
*/
protected void hit(int hitpoints, GameContext gameContext) {
/* implementation for center:
* the enemy reaches the center before getting destroyed
*/
gameContext.enemyReachesDestination(hitpoints);
}
public ParsedObjModel getParsedObjModel() {
return parsedObjModel;
}
public void setParsedObjModel(ParsedObjModel parsedObjModel) {
this.parsedObjModel = parsedObjModel;
}
public boolean isHidden() {
return hidden;
}
public void setHidden(boolean hidden) {
this.hidden = hidden;
}
}
<file_sep>package andar.tower.defense.model;
import java.io.BufferedReader;
import java.io.IOException;
import java.util.ArrayList;
import andar.tower.defense.GameContext;
import andar.tower.defense.GameActivity.Config;
import andar.tower.defense.parser.ObjParser;
import andar.tower.defense.util.BaseFileUtil;
import android.graphics.Point;
import android.net.ParseException;
import android.os.Debug;
import android.util.Log;
import edu.dhbw.andar.ARToolkit;
import edu.dhbw.andar.exceptions.AndARException;
/**
* Manage a pool of models for reuse
*
* @author jakob
*
*/
public class ModelPool {
private GameContext gameContext;
private ArrayList<Enemy> activeAirplanes = new ArrayList<Enemy>();
private ArrayList<Enemy> inactiveAirplanes = new ArrayList<Enemy>();
private ArrayList<Enemy> activeTanks = new ArrayList<Enemy>();
private ArrayList<Enemy> inactiveTanks = new ArrayList<Enemy>();
private ArrayList<Enemy> activeBullets = new ArrayList<Enemy>();
private ArrayList<Enemy> inactiveBullets = new ArrayList<Enemy>();
private ArrayList<Tower> activeTowers = new ArrayList<Tower>();
private BaseFileUtil fileUtil;
private ObjParser parser;
public static final String CENTER_PATTERN = "marker_fisch16.patt";
private final ParsedObjModel AIRPLANE_OBJMODEL, TANK_OBJMODEL,
BULLET_OBJMODEL, TOWER_OBJMODEL;
private Model center;
private ARToolkit artoolkit;
private static final String tag = "EnemyPool";
public ModelPool(GameContext gameContext, ARToolkit artoolkit, BaseFileUtil fileUtil) {
this.gameContext = gameContext;
this.artoolkit = artoolkit;
this.fileUtil = fileUtil;
parser = new ObjParser(fileUtil);
center = loadCenter();
AIRPLANE_OBJMODEL = loadModelFromFile("Airplane.obj");
TANK_OBJMODEL = loadModelFromFile("tank3.obj");
BULLET_OBJMODEL = loadModelFromFile("bullet.obj");
TOWER_OBJMODEL = loadModelFromFile("Tower.obj");
}
private synchronized Enemy getEnemy(int type, int health, int velocity, ArrayList<Point> way, Point startPoint) {
ArrayList<Enemy> activeList = null;
ArrayList<Enemy> inactiveList = null;
ParsedObjModel parsedObjModel = null;
switch (type) {
case Enemy.AIRPLANE:
activeList = activeAirplanes;
inactiveList = inactiveAirplanes;
parsedObjModel = AIRPLANE_OBJMODEL;
break;
case Enemy.TANK:
activeList = activeTanks;
inactiveList = inactiveTanks;
parsedObjModel = TANK_OBJMODEL;
break;
case Enemy.BULLET:
activeList = activeBullets;
inactiveList = inactiveBullets;
parsedObjModel = BULLET_OBJMODEL;
break;
default:
break;
}
Enemy enemy;
if (inactiveList.size() == 0) {
enemy = new Enemy(gameContext, type, parsedObjModel, CENTER_PATTERN, null, center,
health, velocity);
try {
artoolkit.registerARObject(enemy.model3D);
} catch (AndARException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
} else {
enemy = inactiveList.remove(0);
enemy.setHidden(false);
}
enemy.xpos = startPoint.x;
enemy.ypos = startPoint.y;
enemy.way = way;
activeList.add(enemy);
return enemy;
}
public synchronized void dismissEnemy(int type, Enemy enemy) {
enemy.setHidden(true);
ArrayList<Enemy> activeList = null;
ArrayList<Enemy> inactiveList = null;
switch (type) {
case Enemy.AIRPLANE:
activeList = activeAirplanes;
inactiveList = inactiveAirplanes;
break;
case Enemy.TANK:
activeList = activeTanks;
inactiveList = inactiveTanks;
break;
case Enemy.BULLET:
activeList = activeBullets;
inactiveList = inactiveBullets;
break;
default:
break;
}
if (activeList.contains(enemy)) {
activeList.remove(enemy);
inactiveList.add(enemy);
}
}
public Enemy getAirplane() {
Point startPoint = randomWayPoint(60);
ArrayList<Point> way = new ArrayList<Point>();
way.add(new Point(0,0));
return getEnemy(Enemy.AIRPLANE, 20, 3, way, startPoint);
}
public void dismissAirplane(Enemy enemy) {
dismissEnemy(Enemy.AIRPLANE, enemy);
}
public Enemy getTank() {
Point startPoint = randomWayPoint(60);
ArrayList<Point> way = new ArrayList<Point>();
way.add(new Point(0,0));
return getEnemy(Enemy.TANK, 100, 2, way, startPoint);
}
public void dismissTank(Enemy enemy) {
dismissEnemy(Enemy.TANK, enemy);
}
public Enemy getBullet(Point startPoint, Point targetLocation) {
ArrayList<Point> way = new ArrayList<Point>();
way.add(targetLocation);
return getEnemy(Enemy.BULLET, 10, 10, way, startPoint);
}
public void dismissBullet(Enemy enemy) {
dismissEnemy(Enemy.BULLET, enemy);
}
public Tower getTower(String markerName) {
Tower tower = new Tower(gameContext, TOWER_OBJMODEL, markerName);
activeTowers.add(tower);
try {
artoolkit.registerARObject(tower.model3D);
} catch (AndARException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return tower;
}
private Point randomWayPoint(int distanceFromCenter) {
/* In normal view distance you can see a model on screen in a range
* of: model.xpos/model.ypos: (-30..+30)/(-30..+30)
*/
// enemies head to center starting on a distance of 15
int radius = distanceFromCenter;
double maxX = Math.sqrt((radius*radius)/2);
double deltaX = Math.signum(Math.random()-0.5) * Math.random() * maxX;
double deltaY = Math.signum(Math.random()-0.5) * Math.sqrt(radius*radius - deltaX*deltaX);
return new Point((int)deltaX, (int)deltaY);
}
private ParsedObjModel loadModelFromFile(String modelFileName) {
// read the model file:
ParsedObjModel parsedObjModel = null;
if (modelFileName.endsWith(".obj")) {
try {
if (Config.DEBUG)
Debug.startMethodTracing("AndObjViewer");
if (fileUtil != null) {
BufferedReader fileReader = fileUtil.getReaderFromName(modelFileName);
if (fileReader != null) {
parsedObjModel = new ParsedObjModel(modelFileName);
parser.parse(parsedObjModel, modelFileName.substring(0,
modelFileName.length() - 4), fileReader);
Log.i(tag , "new Model3D: " + modelFileName);
} else {
Log.w("ModelLoader", "no file reader: "
+ modelFileName);
}
}
if (Config.DEBUG)
Debug.stopMethodTracing();
} catch (IOException e) {
e.printStackTrace();
} catch (ParseException e) {
e.printStackTrace();
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
return parsedObjModel;
}
public Model getCenter() {
return center;
}
private Model loadCenter() {
// load red circle on centermarker
String centerModelName = "energy.obj";
ParsedObjModel energyObjModel = loadModelFromFile(centerModelName);
Model model = new Model(gameContext, energyObjModel, ModelPool.CENTER_PATTERN);
model.name = "center";
try {
artoolkit.registerARObject(model.model3D);
} catch (AndARException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return model;
}
public synchronized ArrayList<Enemy> getActiveEnemies() {
ArrayList<Enemy> allActiveEnemies = new ArrayList<Enemy>();
allActiveEnemies.addAll(activeAirplanes);
allActiveEnemies.addAll(activeTanks);
return allActiveEnemies;
}
public ArrayList<Enemy> getActiveBullets() {
return activeBullets;
}
public ArrayList<Tower> getActiveTowers() {
return activeTowers;
}
}
<file_sep>package andar.tower.defense;
import andar.tower.defense.model.Enemy;
import andar.tower.defense.model.Tower;
import android.os.Message;
public class GameThread extends Thread {
// Game objects:
private GameContext gameContext;
private boolean running = true;
// time
long prevTime;
long prevLongTime;
long currTime;
private GameActivityHandler gameActivityHandler;
public boolean loadingDone = false;
// game area limits
public static final float UPPERLIMITX = 200;
public static final float LOWERLIMITX = -200;
public static final float UPPERLIMITY = 150;
public static final float LOWERLIMITY = -150;
public GameThread(GameActivityHandler gameActivityHandler,
GameContext gameContext) {
this.gameActivityHandler = gameActivityHandler;
this.gameContext = gameContext;
setDaemon(true);
start();
}
public void updateHUD(float x, float y) {
Message msg = Message.obtain(gameActivityHandler,
gameActivityHandler.UPDATE_X_Y, (int) x, (int) y);
msg.sendToTarget();
}
@Override
public synchronized void run() {
super.run();
setName("GameThread");
prevTime = System.currentTimeMillis();
long deltaTime;
        long deltaLongTime = 15 * 1000; // milliseconds
long maxDeltaLong = 15 * 1000;
yield();
while (running) {
if (loadingDone) {
currTime = System.currentTimeMillis();
deltaTime = currTime - prevTime;
deltaLongTime = currTime - prevLongTime;
                // create a new enemy every 15 seconds
if (deltaLongTime > maxDeltaLong) {
gameContext.createEnemy();
prevLongTime = currTime;
}
prevTime = currTime;
gameContext.gameCenter.update(deltaTime);
// update all positions
synchronized (gameContext.modelPool) {
for (Enemy enemy : gameContext.modelPool.getActiveEnemies()) {
enemy.positionUpdate(gameContext);
}
}
synchronized (gameContext.modelPool) {
for (Enemy bullet : gameContext.modelPool
.getActiveBullets()) {
bullet.positionUpdate(gameContext);
}
}
// update positioning of towers
int i = 0;
for (Tower tower : gameContext.modelPool.getActiveTowers()) {
tower.model3D.update(deltaTime, gameContext.gameCenter);
int minDistance = tower
.updateNearestEnemyInRange(gameContext.modelPool
.getActiveEnemies());
tower.attack();
if (i == 0) {
updateHUD(tower.model3D.getX(), minDistance);
// updateHUD(tower.model3D.getX(),
// tower.model3D.getY());
}
i++;
}
}
yield();
}
}
public void setRunning(boolean running) {
this.running = running;
}
}
<file_sep>package andar.tower.defense;
import andar.tower.defense.model.ModelPool;
import andar.tower.defense.util.AssetsFileUtil;
import andar.tower.defense.util.BaseFileUtil;
import android.app.ProgressDialog;
import android.content.res.Resources;
import android.graphics.Point;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Message;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.SurfaceHolder;
import android.view.View;
import android.widget.TextView;
import android.widget.RelativeLayout.LayoutParams;
import edu.dhbw.andar.ARToolkit;
import edu.dhbw.andar.AndARActivity;
import edu.dhbw.andar.exceptions.AndARException;
public class GameActivity extends AndARActivity implements
SurfaceHolder.Callback {
private static final String tag = "GameActivity";
private ProgressDialog waitDialog;
private Resources res;
public static final boolean DEBUG = false;
ARToolkit artoolkit;
private andar.tower.defense.GameThread gameThread;
private GameActivityHandler handler;
TextView hud_x;
TextView hud_y;
private GameContext gameContext;
public GameActivity() {
super(false);
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
super.setNonARRenderer(new LightingRenderer());// or might be omited
res = getResources();
artoolkit = getArtoolkit();
// getSurfaceView().setOnTouchListener(new TouchEventHandler());
getSurfaceView().getHolder().addCallback(this);
GameCenter gameCenter = new GameCenter("center", ModelPool.CENTER_PATTERN,
137.0, new double[] { 0, 0 });// 170
try {
artoolkit.registerARObject(gameCenter);
} catch (AndARException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
createHUD();
gameContext = new GameContext(100, gameCenter, handler);
gameThread = new GameThread(handler, gameContext);
}
public void createHUD() {
// add layout
LayoutInflater controlInflater = LayoutInflater.from(getBaseContext());
View viewControl = controlInflater.inflate(R.layout.hud, null);
LayoutParams layoutParamsControl = new LayoutParams(
LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT);
this.addContentView(viewControl, layoutParamsControl);
// Handler for UI callbacks
hud_x = (TextView) findViewById(R.id.tower_x);
hud_y = (TextView) findViewById(R.id.tower_y);
handler = new GameActivityHandler() {
@Override
public void handleMessage(Message msg) {
if (msg.getData().containsKey("test")) {
int i = 1;
}
if (msg.what == UPDATE_X_Y) {
int x = msg.arg1;
hud_x.setText("Tower rel. x: " + x);
hud_y.setText("Dist En-Tow: " + msg.arg2);
} else if (msg.what == GET_TANK) {
gameContext.modelPool.getTank();
} else if (msg.what == GET_AIRPLANE) {
gameContext.modelPool.getAirplane();
} else if (msg.what == GET_BULLET) {
Bundle data = msg.getData();
Point startPoint = new Point(data.getInt("start_x"),data.getInt("start_y"));
Point targetPoint = new Point(data.getInt("target_x"),data.getInt("target_y"));
gameContext.modelPool.getBullet(startPoint, targetPoint);
} else {
Log.i(tag, "Unknown handle with what-code: " + msg.what);
}
}
};
}
/**
* Inform the user about exceptions that occurred in background threads.
*/
@Override
public void uncaughtException(Thread thread, Throwable ex) {
System.out.println("Exception Occured !!");
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
super.surfaceCreated(holder);
// load the model
// this is done here, to assure the surface was already created, so that
// the preview can be started
// after loading the model
if (!gameThread.loadingDone) {
waitDialog = ProgressDialog.show(this, "", getResources().getText(
R.string.loading), true);
waitDialog.show();
new ModelLoader().execute();
}
}
private class ModelLoader extends AsyncTask<Void, Void, Void> {
/* time measures in ms:
* ObjParser.parse: 3000-10000 ms
* new Model3D(...) 100 ms
* registerARObject 10 ms
*/
BaseFileUtil fileUtil;
private String tag = "GameActivity";
public ModelLoader() {
super();
fileUtil = new AssetsFileUtil(getResources().getAssets());
fileUtil.setBaseFolder("models/");
}
@Override
protected Void doInBackground(Void... params) {
gameContext.modelPool = new ModelPool(gameContext, artoolkit, fileUtil);
gameContext.modelPool.getTower("marker_rupee16.patt");
gameContext.modelPool.getTower("marker_peace16.patt");
gameContext.modelPool.getTower("marker_at16.patt");
gameContext.modelPool.getTower("marker_hand16.patt");
/* loading finished */
gameThread.loadingDone = true;
return null;
}
@Override
protected void onPostExecute(Void result) {
super.onPostExecute(result);
waitDialog.dismiss();
Log.d("Starting Preview", "Preview starting ");
startPreview();
}
}
public class Config {
public final static boolean DEBUG = false;
}
}
| e1aa320b766f118cd391f2180ee9668632e32d6b | [
"Java"
] | 4 | Java | AkshayPramoth/andar-tower-defense | 1353538aaa1c6b6fdffeb7d39cf295f88e69fa17 | 8402daf06daad8e4ce12129fd845479bd839d759 | |
refs/heads/master | <file_sep>a = int(input("enter number1: "))
b = int(input("enter number2: "))
if a == b:
print("true")
else:
    print(a + b)<file_sep>#Write a program to convert binary number to Decimal number
num = int(input("Enter binary number: "))
a = 0
i = 0
#print("decimal representation of ",n,end=" is ")
n = num
while (n != 0):
    rem = int(n % 10)
    a = a + rem * (2 ** i)
    n = n // 10
i+=1
print(a)
<file_sep>#Write a Python program to convert a decimal number to binary
def decimaltobinary(num):
if num>1:
decimaltobinary(num//2)
    print(num % 2, end="")
a = int(input("enter number: "))
decimaltobinary(a)<file_sep>number1 = int(input("enter number1"))
operator = input("enter operator")
number2 = int(input("enter number2"))
if operator == "+":
c=number1+number2
print(c)
elif operator=="-":
c=number1-number2
print(c)
elif operator == "*":
c=number1*number2
print(c)
elif operator == "/":
c=number1/number2
print(c)
else:
print("error")<file_sep>print(input("enter your name "))
a = int(input("Enter your physics marks "))
b = int(input("Enter your mathematics marks "))
c = int(input("Enter your english marks "))
d = int(input("Enter your chemistry marks "))
e = int(input("Enter your urdu marks "))
Total_marks = a + b + c + d + e
percentage = (Total_marks/500)*100
if percentage >=80:
print("your percentage is " + str(percentage)+" and Grade is A+")
elif percentage >=70:
print("your percentage is " + str(percentage)+" and Grade is A")
elif percentage>=60:
print("your percentage is " + str(percentage)+" and Grade is B")
elif percentage >=50:
print("your percentage is " + str(percentage)+" and Grade is C")
elif percentage>=40:
print("your percentage is " + str(percentage)+" and Grade is D")
else:
print("your percentage is " + str(percentage)+" and you are fail")<file_sep>#student management system
students = []
while True:
print("\n\nwelcome to student management system", "\n")
print("press 1 to add student ")
print("press 2 to delete student")
print("press 3 to search student")
print("press 4 to check total enrolled students")
print("press 5 to exit \n")
choice = int(input("enter your choice: "))
if choice == 1:
student = {}
student["name"] = input("enter name: ")
student["father name"] = input("enter father name: ")
student["phone number"] = input("enter phone number: ")
student["age"] = input("enter age: ")
students.append(student)
print("you added student", student)
print("list of total student is", students)
elif choice == 2:
        a = input("enter the name of student you want to delete: ")
        for student in students:
            if student["name"].lower() == a.lower():
                students.remove(student)
                break
        print(students)
elif choice == 3:
b = input("enter the name you want to search: ")
for student in students:
if student["name"].lower() == b.lower():
print("yes ",b , "is our student")
elif choice == 4:
print("total enrolled students are", len(students))
elif choice == 5:
break
else:
print("you entered invalid choice")
<file_sep>for i in range(2,10):
print("ayesha")<file_sep>products = [{'name':'clothes', 'quantity':1}]
while True:
print("press 1 to sale item ")
print("press 2 to puchase item")
print("press 3 to delete product")
print("press 4 to edit the data")
print("press 5 to exit")
choice = int(input("enter your choice"))
if choice ==1:
print(products)
    elif choice == 5:
        break
    else:
        print("option not implemented yet")<file_sep># topic: variable scoping
# a = 2
# def my_func():
# a = 9
# print(a)
# my_func()
# print(a)
#topic: inner functions
# def calculate_taxes(percent):
# def actual_tax(salary):
# return salary*percent
# return actual_tax
# actual_tax_fn = calculate_taxes(0.30)
# print(actual_tax_fn)
# print(actual_tax_fn(1000))
# def calculator(num1,num2,op)
# def add(n1,n2):
# return
# while loop
# import random
# temp = random.randint(10,47)
# while temp<45:
# print("it\'s better weather" , temp)
# temp = random.randint(30,50)
# print("i came back due to hot weather, temp=", temp)
# #class
# class Human():
# def __init__(self): #pythons interpreter call it. you dont call it
# print("i am a constructor")
# Human()
# Human()
#
class Human():
    def __init__(self, name="<NAME>", favrt_dish="biryani"):
        self.name = name
        self.favrt_dish = favrt_dish
        print("i am a constructor")
    def set_favrt_dish(self, favrt_dish):
        self.favrt_dish = favrt_dish
h1 = Human()
h2 = Human("Ayesha", "biryani")
h3 = Human()
print(h2.name)
print(h1.favrt_dish)
<file_sep>name="ayesha"
father_name="jawed"
DOB="April 11,1998"
qualification="Bachelors of Engineering"
Interests="Reading, Traveling"
print(name,"\t",father_name,"\n",DOB,"\n",qualification,"\n",Interests)<file_sep>n = int(input("enter no of rows: "))
for rows in range(1, n+1):
for columns in range (1 , rows+1):
print(columns, end = "")
print()
for rows in range(n, 0, -1):
    for columns in range(rows-1, 0, -1):
print(columns, end = "")
print()<file_sep># #print(list(range(1,10,2)))
# # n = int(input("enter rows: "))
# # for i in range(1,n):
# # print("*"*i)
# a = 2
# b = a
# b = 5
# print(a)
# print(b)
# print("a", end = "" )
# print("a", end = "" )
# print("a", end = "" )
# print("a", end = "" )
# a = "ayesha"
# print(a.upper())
# print(a.lower())
# print(a.capitalize())
#student = ["ayesha", 923408652122, "1998" ]
#dictionaries
student = {
"name" : "ayesha",
"cell#" : 923408652122,
"age" : 21,
"father name" : "jawed"
}
# print(student)
# print(student["name"])
# student["name"] = "<NAME>"
# print(student)
# student["module"] = "module 1"
# student["course"] = "artificial intelligence"
# print(student)
# del student["module"]
# print(student.pop("name"))
# print(student)
# students = []
# for i in range(3):
# student = {}
# student["name"] = input("enter name: ")
# student["father name"] = input("enter father name: ")
# student["age"] = input("enter age: ")
# students.append(student)
# print("currently enrolled students" , len(students))
# for student in students:
# print("student")
# print(student)
# print(students)
# # if student["name"].lower() == "inam":
# # print("yes inam is our student")
print(student.values())
print(student.keys())
print(student.items())
# for k in student:
# print(student[k])
# print(k)
print("name" in student)<file_sep>for i in range(1,10):
print(i)
print("ayesha")<file_sep>r = int(input("enter radius: "))
print("volume of sphere is: ", (4*3.142*r**3)/3)<file_sep>import datetime
v = input("If you want to continue...Yes...\notherwise press any key to exit : ")
print(" ")
while True:
a = input(":>>")
if a == "exit" or a == "no":
break
elif a == "date":
print(datetime.date.today())
elif a == "name" or a == "your name" or a == "what is your name" or a == "what is your good name":
print("Osama robot".title())
elif a == "time":
print(datetime.datetime.now())
elif a == "gender" or a == "your gender":
print("Sorry! no gender")
elif a == "what are you doing" or a == "what are you doing now":
print("Answering to you")
elif a == "bye" or a == "byee":
print("<NAME>")
else:
print(">>"+" "+a.title()+" "+ "\nsyntax error :")
print(" ")<file_sep>a = int(input("enter height in feet: "))
b = a*30.48
print("There are ",b,"Cm in",a,"feet")<file_sep>number=int(input('Enter any Number:'))
if number % 2 == 0:
    print('Number ' + str(number) + ' is Even')
else:
    print('Number ' + str(number) + ' is Odd')<file_sep># # a = int(input("enter number1: "))
# # b = int(input("enter number2: "))
# # def sub(num1,num2):
# # c = num1 - num2
# # print(c)
# # def mult(num1,num2):
# # mul= num1 * num2
# # print(mul)
# # def div(num1,num2):
# # divi= num2/num1
# # print(divi)
# # sub(a,b)
# # mult(a,b)
# # div(a,b)
# # position
# def my_pet(owner, pet):
# print(owner, "is an owner of ", pet)
# my_pet("cat","Sarah")
# # keyword argument passing
# def my_pet(owner, pet):
# print(owner, "is an owner of ", pet)
# my_pet(pet="cat",owner="Sarah")
# # default parameter
# def my_pet(owner, pet, city="karachi"):  # "karachi" is the default; if the call below does not pass city, karachi is printed. If no default were given, that argument would have to be passed.
# print(owner, "is an owner of ", pet,".They are from",city)
# my_pet(pet="cat", owner="Sarah")  # here the city argument is not passed, so the default karachi is printed; if some other city were passed, that one would be printed instead
# #takes nothing and return something
# def sum():
# a=2
# b=3
# return(a+b)
# result=sum()
# print(result)
# #takes something and returns something
# def sum(val1,val2):
# result=val1+val2
# return result
# output_of_result = sum(a, b)
# print(output_of_result)
#task:
#take an input from user , pass it in the argument and return whether it is even or odd
a=int(input('Enter Any Num'))
def even(num):
if num % 2 == 0:
return 'Num is Even'
else:
return 'Num is Odd'
output = even(a)
print(output)
<file_sep>#practice
students = ["ayesha" , "ariba" , "hamza"]
print(students)
students.append("alishba")
print(students)
a = students + ["saba" , "shabana" , "iqra"]
print(a)
a[0] = "sania"
print(a)<file_sep>a = int(input())
b= int(input())
c = int(input())
obtain_marks = a+b+c
percentage = (obtain_marks/300)*100
print(obtain_marks, percentage)<file_sep>n = int(input("Enter n: "))
sum = 0
for i in range(0,n+1,1):
sum = sum+i
print("Sum of",n, "integers is",sum)<file_sep># Write a Python program to compute the future value of a specified principal amount, rate of interest and number of years
a = float(input("please enter principal amount: "))
b = float(input("please enter rate of interest in %: "))
c = float(input("enter years of investment: "))
d = a * (1 + b/100) ** c
print("After", c, "years your principal amount", a, "at an interest rate of", b, "% will grow to", d)<file_sep>import random
array = []
for i in range(0,100):
array.append(random.randint(10,10000))
for j in array:
print(j)
a = max(array)
print("maximum element of array=", a)
print("index of maximum element of array=", array.index(a))
b = min(array)
print("minimum element of array=", b)
print("index of minimum element of array=",array.index(b))
c = int((sum(array))/len(array))
print("mean of elements of array=",c)<file_sep># n =int(input("Enter Number of Rows: "))
# for row in range(1,n+1):
# for col in range(1,row+1):
# print(col,end="")
# print()
import datetime
print(datetime.datetime.now())
<file_sep>#count alphabet,numbers and special charcters,spaces
word = input("enter word: ").lower()
alphabet = 0
space = 0
number = 0
special_characters = 0
for i in word:
    if i.isalpha():
        alphabet = alphabet + 1
    elif i == " ":
        space = space + 1
    elif i.isdigit():
        number = number + 1
    elif i in "=';:|!*^$%#@<>.":
        special_characters = special_characters + 1
print("alphabets =",alphabet)
print("spaces=",space)
print("number=",number)
print("special characters =", special_characters)<file_sep>print(dir(__builtins__))
import math
print(dir(math))
import os
print(dir(os))
<file_sep>#write a python program to get an string which is n copies of a given string
a = input("Enter string: ")
b = int(input("How many copies of string you need: "))
print(a*b)<file_sep>a = str(input("write a line: "))
if a.startswith("is"):
print(a)
else:
print("is ",a)
<file_sep># palindrome tester
a = input("enter text: ").lower()
reverse = a[::-1]
if a == reverse:
    print("entered text", a, "is a palindrome")
else:
    print("entered text", a, "is not a palindrome")<file_sep>import random
for i in range(0,10):
print(random.randint(0,500))<file_sep>n = int(input("enter no of rows: " ))
for rows in range(1, n+1):
for col in range (1, rows+1):
print("*", end = "")
print()
for rows in range(n-1,0,-1):
    for col in range(rows, 0, -1):
print("*" , end = "")
print()<file_sep># a = 9
# b = 2
# c = -5
# d = 10
# e = 11
# print(a+b, a-b, a*b, a/b)
# print(a//b, b**9, a%b)
# print(a&b, a|b, a^b)
# print("a>b")
# print("b<a")
a = 5
while a>5:
print("ayesha")
print("jawed")
<file_sep>a = int(input("enter any number: "))
if a % 2 ==0:
print("number is even")
else:
print("number is odd")<file_sep>#write a python program to compute the area of triangle
h = int(input("enter height of triangle: "))
b = int(input("enter base of triangle: "))
area = h*b/2
print("area of triangle is ", area) | 599be43741089a8d0470619e4d0081f50ee08355 | [
"Python"
] | 34 | Python | Ayesha116/official.assigment | a45bbc6fe5080b00d28748512517db1ca065be5e | 8bc0b1a5580b8835b545d90d0ac75ac8db5eb88e | |
refs/heads/master | <file_sep>package edu.rosehulman.moviequotes
import android.content.Context
import android.content.DialogInterface
import android.view.LayoutInflater
import android.view.ViewGroup
import androidx.appcompat.app.AlertDialog
import androidx.recyclerview.widget.RecyclerView
import kotlinx.android.synthetic.main.dialog_add.view.*
class MovieQuoteAdapter(private val context: Context) :
RecyclerView.Adapter<MovieQuoteViewHolder>(),
FirestoreQuotesManager.Observer
{
init {
FirestoreQuotesManager.setObserver(this)
FirestoreQuotesManager.beginListening()
}
fun stopListening() {
FirestoreQuotesManager.stopListening()
}
override fun onCreateViewHolder(parent: ViewGroup, viewType: Int): MovieQuoteViewHolder {
        val view = LayoutInflater.from(context).inflate(R.layout.row_view, parent, false)
return MovieQuoteViewHolder(view, this, context)
}
override fun getItemCount() = FirestoreQuotesManager.getNumQuotes()
override fun onBindViewHolder(holder: MovieQuoteViewHolder, position: Int) {
val movieQuote = FirestoreQuotesManager.getQuoteAt(position)
holder.bind(movieQuote)
}
fun showAddEditDialog(position: Int) {
// pos of -1 means add
val builder = AlertDialog.Builder(context)
builder.setTitle(if (position < 0) R.string.add_dialog_title else R.string.edit_dialog_title)
val view = LayoutInflater.from(context).inflate(R.layout.dialog_add, null, false)
builder.setView(view)
if (position >= 0) {
val mq = FirestoreQuotesManager.getQuoteAt(position)
view.quote_edit_text.setText(mq.quote)
view.movie_edit_text.setText(mq.movie)
}
builder.setPositiveButton(android.R.string.ok) { _: DialogInterface?, _: Int ->
val quote = view.quote_edit_text.text.toString()
val movie = view.movie_edit_text.text.toString()
val movieQuote = MovieQuote(quote, movie)
if (position < 0) {
add(movieQuote)
} else {
edit(quote, movie, position)
}
}
if (position >= 0) {
builder.setNeutralButton("Delete") { _, _ ->
delete(position)
}
}
builder.setNegativeButton(android.R.string.cancel, null)
builder.create().show()
}
fun add(movieQuote: MovieQuote) {
FirestoreQuotesManager.add(movieQuote)
}
private fun edit(quote: String, movie: String, position: Int) {
FirestoreQuotesManager.edit(quote, movie, position)
}
private fun delete(position: Int) {
FirestoreQuotesManager.delete(position)
}
fun select(position: Int) {
FirestoreQuotesManager.select(position)
}
}<file_sep>package edu.rosehulman.moviequotes
import android.util.Log
import com.google.firebase.firestore.*
object FirestoreQuotesManager {
private val movieQuotes = ArrayList<MovieQuote>()
private lateinit var quotesRef: CollectionReference
private lateinit var quotesRegistration: ListenerRegistration
private var observer: Observer? = null
fun setObserver(observer: Observer) {
this.observer = observer
}
fun beginListening() {
quotesRef = FirebaseFirestore
.getInstance()
.collection(Constants.QUOTES_COLLECTION)
quotesRegistration = quotesRef
.orderBy(MovieQuote.CREATED_KEY, Query.Direction.ASCENDING)
.addSnapshotListener { snapshot: QuerySnapshot?, exception: FirebaseFirestoreException? ->
if (exception != null) {
Log.e(Constants.TAG, "Error: $exception")
return@addSnapshotListener
}
for (docChange in snapshot!!.documentChanges) {
val mq = MovieQuote.from(docChange.document)
when (docChange.type) {
DocumentChange.Type.ADDED -> {
movieQuotes.add(0, mq)
observer?.notifyItemInserted(0)
}
DocumentChange.Type.REMOVED -> {
val pos = movieQuotes.indexOfFirst { it.id == mq.id }
movieQuotes.removeAt(pos)
observer?.notifyItemRemoved(pos)
}
DocumentChange.Type.MODIFIED -> {
val pos = movieQuotes.indexOfFirst { it.id == mq.id }
movieQuotes[pos] = mq
observer?.notifyItemChanged(pos)
}
}
}
}
}
fun stopListening() {
Log.d(Constants.TAG, "Removing listener")
quotesRegistration.remove()
}
fun getNumQuotes() = movieQuotes.size
fun getQuoteAt(position: Int) = movieQuotes[position]
fun add(movieQuote: MovieQuote) {
quotesRef.add(movieQuote)
}
fun edit(quote: String, movie: String, position: Int) {
movieQuotes[position].quote = quote
movieQuotes[position].movie = movie
quotesRef.document(movieQuotes[position].id).set(movieQuotes[position])
}
fun delete(position: Int) {
quotesRef.document(movieQuotes[position].id).delete()
}
fun select(position: Int) {
movieQuotes[position].isSelected = !movieQuotes[position].isSelected
quotesRef.document(movieQuotes[position].id).set(movieQuotes[position])
}
interface Observer {
// Interestingly, if we use the same names as the
// RecyclerView.Adapter methods, then our custom MovieQuote adapter
// already inherits them from RecyclerView.Adapter.
fun notifyItemInserted(position: Int)
fun notifyItemRemoved(position: Int)
fun notifyItemChanged(position: Int)
}
}<file_sep># MovieQuotesDAOSolution
For class
| f022334831a3b23fe779e327ce756d9e0b9051eb | [
"Markdown",
"Kotlin"
] | 3 | Kotlin | AndroidKotlinCourseMaterial/MovieQuotesDAOSolution | a1dc66ccc1aeb77c11aa4663ca8c1d114a686824 | 2a180a588175d45b5c6d16d785d98cf946bdb3a2 |