Dataset columns: file_name (string, 4–140 chars), prefix (string, up to 12.1k chars), suffix (string, up to 12k chars), middle (string, up to 7.51k chars), fim_type (4 classes: random_line_split, identifier_name, conditional_block, identifier_body). Each row is a fill-in-the-middle example cut from a source file.
api.go
},
},
Action: func(ctx *cli.Context) error {
var query = ctx.String("query")
return list(func(entryc chan<- interface{}, errc chan<- error, done <-chan struct{}) {
c.client.SearchAPIsRaw(query, entryc, errc, done)
}, func(table *TableFormatter) {
table.Header("ID", "Name", "Version", "Description", "Status")
}, func(entry interface{}, table *TableFormatter) {
api := c.client.ConvertToAPI(entry)
table.Row(api.ID, api.Name, api.Version, trim(api.Description, 30), api.Status)
})
},
}
}
func (c *CLI) apiChangeStatus() cli.Command {
return cli.Command{
Name: "change-status",
Usage: "Change API status",
Description: fmt.Sprintf(`Change API status.
Available actions are:
- %s
- %s
- %s
- %s
- %s
- %s
- %s
- %s
`, wso2am.APIActionPublish, wso2am.APIActionDeployAsPrototype, wso2am.APIActionDemoteToCreated, wso2am.APIActionDemoteToPrototyped, wso2am.APIActionBlock, wso2am.APIActionDeprecate, wso2am.APIActionRePublish, wso2am.APIActionRetire),
ArgsUsage: "ID ACTION",
Action: func(ctx *cli.Context) error {
if ctx.NArg() != 2 {
return errors.New("ID and ACTION are required")
}
id := ctx.Args().Get(0)
action := ctx.Args().Get(1)
return c.client.ChangeAPIStatus(id, wso2am.APIAction(action))
},
}
}
func (c *CLI) apiDelete() cli.Command {
return cli.Command{
Name: "delete",
Aliases: []string{"del", "rm"},
Usage: "Delete the API",
Flags: []cli.Flag{
cli.BoolFlag{
Name: "all,a",
},
cli.BoolFlag{
Name: "force,f",
},
},
ArgsUsage: "ID...",
Action: func(ctx *cli.Context) error {
// define rm func
var errs error
rm := func(id string) {
if err := c.client.DeleteAPI(id); err != nil {
if ctx.Bool("force") {
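// A published API usually cannot be deleted outright, so on failure
// --force walks the lifecycle first (deprecate, then retire) and then
// retries the delete.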
if err := c.client.ChangeAPIStatus(id, wso2am.APIActionDeprecate); err != nil {
errs = multierror.Append(errs, err)
fmt.Println(err)
return
}
if err := c.client.ChangeAPIStatus(id, wso2am.APIActionRetire); err != nil {
errs = multierror.Append(errs, err)
fmt.Println(err)
return
}
if err := c.client.DeleteAPI(id); err != nil {
errs = multierror.Append(errs, err)
fmt.Println(err)
return
}
} else {
errs = multierror.Append(errs, err)
fmt.Println(err)
}
} else {
fmt.Println(id)
}
}
// delete apis
if ctx.Bool("all") {
var (
apic = make(chan wso2am.API)
errc = make(chan error)
done = make(chan struct{})
)
go func() {
defer func() {
close(apic)
close(errc)
close(done)
}()
c.client.SearchAPIs("", apic, errc, done)
}()
l:
for {
select {
case a, ok := <-apic:
if ok {
rm(a.ID)
} else {
break l
}
case err, ok := <-errc:
if ok {
errs = multierror.Append(errs, err)
} else {
break l
}
}
}
} else {
for _, id := range ctx.Args() {
rm(id)
}
}
return errs
},
}
}
func (c *CLI) apiInspect() cli.Command {
return cli.Command{
Name: "inspect",
Aliases: []string{"show", "cat"},
Usage: "Inspect the API",
ArgsUsage: "ID",
Action: func(ctx *cli.Context) error {
if ctx.NArg() != 1 {
return errors.New("ID is required")
}
id := ctx.Args().Get(0)
api, err := c.client.API(id)
if err != nil {
return err
}
return c.inspect(api)
},
}
}
func (c *CLI) apiSwagger() cli.Command {
return cli.Command{
Name: "swagger",
Usage: "Inspect the API definition",
ArgsUsage: "ID",
Action: func(ctx *cli.Context) error {
if ctx.NArg() != 1 {
return errors.New("ID is required")
}
id := ctx.Args().Get(0)
def, err := c.client.APIDefinition(id)
if err != nil {
return err
}
return c.inspect(def)
},
}
}
func (c *CLI) apiUpdateSwagger() cli.Command {
return cli.Command{
Name: "update-swagger",
Usage: "Update the API definition",
ArgsUsage: "ID SWAGGERFILE",
Action: func(ctx *cli.Context) error {
if ctx.NArg() != 2 {
return errors.New("ID and SWAGGERFILE are required")
}
id := ctx.Args().Get(0)
def, err := wso2am.NewAPIDefinitionFromFile(ctx.Args().Get(1))
if err != nil {
return err
}
if _, err := c.client.UpdateAPIDefinition(id, def); err != nil {
return err
}
return nil
},
}
}
func (c *CLI) apiThumbnail() cli.Command {
return cli.Command{
Name: "thumbnail",
Usage: "Download the thumbnail",
Action: func(ctx *cli.Context) error {
if ctx.NArg() != 1 {
return errors.New("ID is required")
}
id := ctx.Args().Get(0)
return c.client.Thumbnail(id, os.Stdout)
},
}
}
func (c *CLI) apiUploadThumbnail() cli.Command {
return cli.Command{
Name: "upload-thumbnail",
Usage: "Upload the thumbnail",
ArgsUsage: "ID FILE",
Action: func(ctx *cli.Context) error {
if ctx.NArg() != 2 {
return errors.New("ID and FILE are required")
}
id := ctx.Args().Get(0)
file := ctx.Args().Get(1)
f, err := os.Open(file)
if err != nil {
return err
}
defer f.Close()
if _, err := c.client.UploadThumbnail(id, f); err != nil {
return err
}
return nil
},
}
}
func (c *CLI) apiCreate(update bool) cli.Command {
var commandName string
var commandAliases []string
var commandUsage string
var commandArgsUsage string
flags := []cli.Flag{
cli.StringFlag{
Name: "definition",
},
cli.StringFlag{
Name: "name",
},
cli.StringFlag{
Name: "context",
},
cli.StringFlag{
Name: "version",
},
cli.StringFlag{
Name: "provider",
},
cli.StringFlag{
Name: "production-url",
Value: "http://localhost/",
},
cli.StringFlag{
Name: "sandbox-url",
Value: "http://localhost/",
},
cli.StringFlag{
Name: "gateway-env",
},
cli.BoolFlag{
Name: "publish,P",
},
cli.StringSliceFlag{
Name: "visible-role",
},
}
if update {
commandName = "update"
commandUsage = "Update the API"
commandArgsUsage = "ID"
} else {
commandName = "create"
commandAliases = []string{"new"}
commandUsage = "Create the API"
flags = append(flags, cli.BoolFlag{
Name: "update",
})
}
return cli.Command{
Name: commandName,
Aliases: commandAliases,
Usage: commandUsage,
ArgsUsage: commandArgsUsage,
Flags: flags,
Action: func(ctx *cli.Context) error {
if update {
if ctx.NArg() != 1 {
return errors.New("APIID is required")
}
unmodifiableFlags := []string{"name", "version", "context", "provider", "state"}
for _, f := range unmodifiableFlags {
if ctx.IsSet(f) {
return fmt.Errorf("cannot update %q", f)
}
}
} else {
if err := c.checkRequiredParameters(ctx, "definition", "name", "context", "version", "production-url", "gateway-env"); err != nil {
return err
}
}
var api *wso2am.APIDetail
if update {
id := ctx.Args().First()
a, err := c.client.API(id)
if err != nil {
return err
}
api = a
} else {
api = c.client.NewAPI()
}
if ctx.IsSet("definition") {
swaggerFile := ctx.String("definition")
def, err := wso2am.NewAPIDefinitionFromFile(swaggerFile)
if err != nil {
return err
}
api.Definition = def
}
if ctx.IsSet("name") {
api.Name = ctx.String("name")
}
if ctx.IsSet("context") {
api.Context = ctx.String("context")
}
if ctx.IsSet("version") {
api.Version = ctx.String("version")
}
if ctx.IsSet("gateway-env") {
api.GatewayEnvironments = ctx.String("gateway-env")
}
if ctx.IsSet("provider") {
api.Provider = ctx.String("provider")
}
if ctx.IsSet("visible-role") {
api.Visibility = wso2am.APIVisibilityRestricted
api.VisibleRoles = ctx.StringSlice("visible-role")
}
// endpoint config
if ctx.IsSet("production-url") || ctx.IsSet("sandbox-url") {
endpointConfig := &wso2am.APIEndpointConfig{
Type: "http",
}
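// A minimal sketch (assumed wiring, not part of the dump) of how these
// cli.Command constructors would typically be registered in a urfave/cli
// v1 application; apiCommand and the "api" command name are hypothetical:
//
// func (c *CLI) apiCommand() cli.Command {
// 	return cli.Command{
// 		Name:  "api",
// 		Usage: "Manage APIs",
// 		Subcommands: []cli.Command{
// 			c.apiChangeStatus(),
// 			c.apiDelete(),
// 			c.apiInspect(),
// 			c.apiSwagger(),
// 			c.apiUpdateSwagger(),
// 			c.apiThumbnail(),
// 			c.apiUploadThumbnail(),
// 			c.apiCreate(false), // "create"
// 			c.apiCreate(true),  // "update"
// 		},
// 	}
// }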
main.rs
struct DicomFile {
/// File path
file: PathBuf,
/// Storage SOP Class UID
sop_class_uid: String,
/// Storage SOP Instance UID
sop_instance_uid: String,
/// File Transfer Syntax
file_transfer_syntax: String,
/// Transfer Syntax selected
ts_selected: Option<String>,
/// Presentation Context selected
pc_selected: Option<dicom_ul::pdu::PresentationContextResult>,
}
#[derive(Debug, Snafu)]
enum Error {
/// Could not initialize SCU
InitScu {
source: dicom_ul::association::client::Error,
},
/// Could not construct DICOM command
CreateCommand { source: dicom_object::WriteError },
#[snafu(whatever, display("{}", message))]
Other {
message: String,
#[snafu(source(from(Box<dyn std::error::Error + 'static>, Some)))]
source: Option<Box<dyn std::error::Error + 'static>>,
},
}
fn main() {
run().unwrap_or_else(|e| {
error!("{}", Report::from_error(e));
std::process::exit(-2);
});
}
fn run() -> Result<(), Error> {
let App {
addr,
files,
verbose,
message_id,
calling_ae_title,
called_ae_title,
max_pdu_length,
fail_first,
} = App::parse();
tracing::subscriber::set_global_default(
tracing_subscriber::FmtSubscriber::builder()
.with_max_level(if verbose { Level::DEBUG } else { Level::INFO })
.finish(),
)
.whatever_context("Could not set up global logging subscriber")
.unwrap_or_else(|e: Whatever| {
eprintln!("[ERROR] {}", Report::from_error(e));
});
let mut checked_files: Vec<PathBuf> = vec![];
let mut dicom_files: Vec<DicomFile> = vec![];
let mut presentation_contexts = HashSet::new();
for file in files {
if file.is_dir() {
for file in WalkDir::new(file.as_path())
.into_iter()
.filter_map(Result::ok)
.filter(|f| !f.file_type().is_dir())
{
checked_files.push(file.into_path());
}
} else {
checked_files.push(file);
}
}
for file in checked_files {
if verbose {
info!("Opening file '{}'...", file.display());
}
match check_file(&file) {
Ok(dicom_file) => {
presentation_contexts.insert((
dicom_file.sop_class_uid.to_string(),
dicom_file.file_transfer_syntax.clone(),
));
dicom_files.push(dicom_file);
}
Err(_) => {
warn!("Could not open file {} as DICOM", file.display());
}
}
}
if dicom_files.is_empty() {
eprintln!("No supported files to transfer");
std::process::exit(-1);
}
if verbose {
info!("Establishing association with '{}'...", &addr);
}
let mut scu_init = ClientAssociationOptions::new()
.calling_ae_title(calling_ae_title)
.max_pdu_length(max_pdu_length);
for (storage_sop_class_uid, transfer_syntax) in &presentation_contexts {
scu_init = scu_init.with_presentation_context(storage_sop_class_uid, vec![transfer_syntax]);
}
if let Some(called_ae_title) = called_ae_title {
scu_init = scu_init.called_ae_title(called_ae_title);
}
let mut scu = scu_init.establish_with(&addr).context(InitScuSnafu)?;
if verbose {
info!("Association established");
}
for file in &mut dicom_files {
// TODO(#106) transfer syntax conversion is currently not supported
let r: Result<_, Error> = check_presentation_contexts(file, scu.presentation_contexts())
.whatever_context::<_, _>("Could not choose a transfer syntax");
match r {
Ok((pc, ts)) => {
file.pc_selected = Some(pc);
file.ts_selected = Some(ts);
}
Err(e) => {
error!("{}", Report::from_error(e));
if fail_first {
let _ = scu.abort();
std::process::exit(-2);
}
}
}
}
let progress_bar;
if !verbose {
progress_bar = Some(ProgressBar::new(dicom_files.len() as u64));
if let Some(pb) = progress_bar.as_ref() {
pb.set_style(
ProgressStyle::default_bar()
.template("[{elapsed_precise}] {bar:40} {pos}/{len} {wide_msg}")
.expect("Invalid progress bar template"),
);
pb.enable_steady_tick(Duration::new(0, 480_000_000));
};
} else {
progress_bar = None;
}
for file in dicom_files {
if let (Some(pc_selected), Some(ts_uid_selected)) = (file.pc_selected, file.ts_selected) {
if let Some(pb) = &progress_bar {
pb.set_message(file.sop_instance_uid.clone());
}
let cmd = store_req_command(&file.sop_class_uid, &file.sop_instance_uid, message_id);
let mut cmd_data = Vec::with_capacity(128);
cmd.write_dataset_with_ts(
&mut cmd_data,
&dicom_transfer_syntax_registry::entries::IMPLICIT_VR_LITTLE_ENDIAN.erased(),
)
.context(CreateCommandSnafu)?;
let mut object_data = Vec::with_capacity(2048);
let dicom_file =
open_file(&file.file).whatever_context("Could not open listed DICOM file")?;
let ts_selected = TransferSyntaxRegistry
.get(&ts_uid_selected)
.whatever_context("Unsupported file transfer syntax")?;
dicom_file
.write_dataset_with_ts(&mut object_data, ts_selected)
.whatever_context("Could not write object dataset")?;
let nbytes = cmd_data.len() + object_data.len();
if verbose {
info!(
"Sending file {} (~ {} kB), uid={}, sop={}, ts={}",
file.file.display(),
nbytes / 1_000,
&file.sop_instance_uid,
&file.sop_class_uid,
ts_uid_selected,
);
}
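// If the command set and the data set both fit into a single PDU
// (keeping ~100 bytes of headroom for PDU/PDV headers), send them in one
// P-DATA; otherwise send the command alone and stream the data set
// through the P-Data writer, which fragments it to the negotiated
// maximum PDU size.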
if nbytes < scu.acceptor_max_pdu_length().saturating_sub(100) as usize {
let pdu = Pdu::PData {
data: vec![
PDataValue {
presentation_context_id: pc_selected.id,
value_type: PDataValueType::Command,
is_last: true,
data: cmd_data,
},
PDataValue {
presentation_context_id: pc_selected.id,
value_type: PDataValueType::Data,
is_last: true,
data: object_data,
},
],
};
scu.send(&pdu)
.whatever_context("Failed to send C-STORE-RQ")?;
} else {
let pdu = Pdu::PData {
data: vec![PDataValue {
presentation_context_id: pc_selected.id,
value_type: PDataValueType::Command,
is_last: true,
data: cmd_data,
}],
};
scu.send(&pdu)
.whatever_context("Failed to send C-STORE-RQ command")?;
{
let mut pdata = scu.send_pdata(pc_selected.id);
pdata
.write_all(&object_data)
.whatever_context("Failed to send C-STORE-RQ P-Data")?;
}
}
if verbose {
debug!("Awaiting response...");
}
let rsp_pdu = scu
.receive()
.whatever_context("Failed to receive C-STORE-RSP")?;
match rsp_pdu {
Pdu::PData { data } => {
let data_value = &data[0];
let cmd_obj = InMemDicomObject::read_dataset_with_ts(
&data_value.data[..],
&dicom_transfer_syntax_registry::entries::IMPLICIT_VR_LITTLE_ENDIAN
.erased(),
)
.whatever_context("Could not read response from SCP")?;
if verbose {
debug!("Full response: {:?}", cmd_obj);
}
let status = cmd_obj
.element(tags::STATUS)
.whatever_context("Could not find status code in response")?
.to_int::<u16>()
.whatever_context("Status code in response is not a valid integer")?;
let storage_sop_instance_uid = file
.sop_instance_uid
.trim_end_matches(|c: char| c.is_whitespace() || c == '\0');
match status {
// Success
0 => {
if verbose {
info!("Successfully stored instance {}", storage_sop_instance_uid);
}
}
// Warning
1 | 0x0107 | 0x0116 | 0xB000..=0xBFFF => {
warn!(
"Possible issue storing instance `{}` (status code {:04X}H)",
storage_sop_instance_uid, status
);
}
0xFF00 | 0xFF01 => {
warn!(
"Possible issue storing instance `{}`: status is pending (status code {:04X}H)",
storage_sop_instance_uid, status
);
}
0xFE00 => {
error!(
"Could not store instance `{}`: operation cancelled",
storage_sop_instance_uid
);
if fail_first {
let _ = scu.abort();
std::process::exit(-2);
}
}
_ => {
error!(
"Failed to store instance `{}` (status code {:04X}H)",
storage_sop_instance_uid, status
);
if fail_first {
let _ = scu.abort();
std::process::exit(-2);
}
}
}
}
pdu @ Pdu::Unknown { .. }
| pdu @ Pdu::AssociationRQ { .. }
| pdu @ Pdu::AssociationAC { .. }
| pdu @ Pdu::AssociationRJ { .. }
| pdu @ Pdu::ReleaseRQ
| pdu @ Pdu::ReleaseRP
| pdu @ Pdu::AbortRQ { .. } => {
error!("Unexpected SCP response: {:?}", pdu);
let _ = scu.abort();
std::process::exit(-2);
}
}
}
if let Some(pb) = progress_bar.as_ref() {
pb.inc(1)
};
}
if let Some(pb) = progress_bar {
pb.finish_with_message("done")
};
scu.release()
.whatever_context("Failed to release SCU association")?;
Ok(())
}
fn store_req_command(
storage_sop_class_uid: &str,
storage_sop_instance_uid: &str,
message_id: u16,
) -> InMemDicomObject<StandardDataDictionary> {
InMemDicomObject::command_from_element_iter([
// SOP Class UID
DataElement::new(
tags::AFFECTED_SOP_CLASS_UID,
VR::UI,
dicom_value!(Str, storage_sop_class_uid),
),
// command field
DataElement::new(tags::COMMAND_FIELD, VR::US, dicom_value!(U16, [0x0001])),
// message ID
DataElement::new(tags::MESSAGE_ID, VR::US, dicom_value!(U16, [message_id])),
//priority
DataElement::new(tags::PRIORITY, VR::US, dicom_value!(U16, [0x0000])),
// data set type
DataElement::new(
tags::COMMAND_DATA_SET_TYPE,
VR::US,
dicom_value!(U16, [0x0000]),
),
// affected SOP Instance UID
DataElement::new(
tags::AFFECTED_SOP_INSTANCE_UID,
VR::UI,
dicom_value!(Str, storage_sop_instance_uid),
),
])
}
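// Usage sketch (values illustrative, not from the dump): build the
// C-STORE-RQ command set for a Secondary Capture image with message ID 1.
//
// let cmd = store_req_command("1.2.840.10008.5.1.4.1.1.7", "2.25.4242", 1);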
fn check_file(file: &Path) -> Result<DicomFile, Error> {
// Ignore DICOMDIR files until better support is added
let _ = (file.file_name() != Some(OsStr::new("DICOMDIR")))
.then_some(false)
.whatever_context("DICOMDIR file not supported")?;
let dicom_file = dicom_object::OpenFileOptions::new()
.read_until(Tag(0x0001, 0x000))
.open_file(file)
.with_whatever_context(|_| format!("Could not open DICOM file {}", file.display()))?;
let meta = dicom_file.meta();
let storage_sop_class_uid = &meta.media_storage_sop_class_uid;
let storage_sop_instance_uid = &meta.media_storage_sop_instance_uid;
let transfer_syntax_uid = &meta.transfer_syntax.trim_end_matches('\0');
let ts = TransferSyntaxRegistry
.get(transfer_syntax_uid)
.whatever_context("Unsupported file transfer syntax")?;
Ok(DicomFile {
file: file.to_path_buf(),
sop_class_uid: storage_sop_class_uid.to_string(),
sop_instance_uid: storage_sop_instance_uid.to_string(),
file_transfer_syntax: String::from(ts.uid()),
ts_selected: None,
pc_selected: None,
})
}
fn check_presentation_contexts(
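// The dataset window ends at the function name above; the parameters,
// return type, and matching logic below are a reconstruction inferred
// from the call site in run(), not the original body.
file: &DicomFile,
pcs: &[dicom_ul::pdu::PresentationContextResult],
) -> Result<(dicom_ul::pdu::PresentationContextResult, String), Whatever> {
// Without transfer syntax conversion (see the TODO in run()), the only
// usable presentation context is one whose accepted transfer syntax
// matches the file's own transfer syntax (field name assumed).
let pc = pcs
.iter()
.find(|pc| pc.transfer_syntax == file.file_transfer_syntax)
.whatever_context("No accepted presentation context matches the file")?;
Ok((pc.clone(), pc.transfer_syntax.clone()))
}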
args.rs
pub enum Command {
/// Add dependency to a Cargo.toml manifest file.
#[structopt(name = "add")]
#[structopt(
after_help = "This command allows you to add a dependency to a Cargo.toml manifest file. If <crate> is a github
or gitlab repository URL, or a local path, `cargo add` will try to automatically get the crate name
and set the appropriate `--git` or `--path` value.
Please note that Cargo treats versions like '1.2.3' as '^1.2.3' (and that '^1.2.3' is specified
as '>=1.2.3 and <2.0.0'). By default, `cargo add` will use this format, as it is the one that the
crates.io registry suggests. One goal of `cargo add` is to prevent you from using wildcard
dependencies (version set to '*')."
)]
Add(Args),
}
#[derive(Debug, StructOpt)]
pub struct Args {
/// Crates to be added.
#[structopt(name = "crate", required = true)]
pub crates: Vec<String>,
/// Rename a dependency in Cargo.toml,
/// https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html#renaming-dependencies-in-cargotoml
/// Only works when specifying a single dependency.
#[structopt(long = "rename", short = "r")]
pub rename: Option<String>,
/// Add crate as development dependency.
#[structopt(long = "dev", short = "D", conflicts_with = "build")]
pub dev: bool,
/// Add crate as build dependency.
#[structopt(long = "build", short = "B", conflicts_with = "dev")]
pub build: bool,
/// Specify the version to grab from the registry (crates.io).
/// You can also specify the version as part of the name, e.g.
/// `cargo add [email protected]`.
#[structopt(long = "vers", value_name = "uri", conflicts_with = "git")]
pub vers: Option<String>,
/// Specify a git repository to download the crate from.
#[structopt(
long = "git",
value_name = "uri",
conflicts_with = "vers",
conflicts_with = "path"
)]
pub git: Option<String>,
/// Specify a git branch to download the crate from.
#[structopt(
long = "branch",
value_name = "branch",
conflicts_with = "vers",
conflicts_with = "path"
)]
pub branch: Option<String>,
/// Specify the path the crate should be loaded from.
#[structopt(long = "path", conflicts_with = "git")]
pub path: Option<PathBuf>,
/// Add as dependency to the given target platform.
#[structopt(long = "target", conflicts_with = "dev", conflicts_with = "build")]
pub target: Option<String>,
/// Add as an optional dependency (for use in features).
#[structopt(long = "optional", conflicts_with = "dev", conflicts_with = "build")]
pub optional: bool,
/// Path to the manifest to add a dependency to.
#[structopt(long = "manifest-path", value_name = "path", conflicts_with = "pkgid")]
pub manifest_path: Option<PathBuf>,
/// Package id of the crate to add this dependency to.
#[structopt(
long = "package",
short = "p",
value_name = "pkgid",
conflicts_with = "path"
)]
pub pkgid: Option<String>,
/// Choose method of semantic version upgrade. Must be one of "none" (exact version, `=`
/// modifier), "patch" (`~` modifier), "minor" (`^` modifier), "all" (`>=`), or "default" (no
/// modifier).
#[structopt(
long = "upgrade",
value_name = "method",
possible_value = "none",
possible_value = "patch",
possible_value = "minor",
possible_value = "all",
possible_value = "default",
default_value = "default"
)]
pub upgrade: String,
/// Include prerelease versions when fetching from crates.io (e.g.
/// '0.6.0-alpha').
#[structopt(long = "allow-prerelease")]
pub allow_prerelease: bool,
/// Space-separated list of features to add. For an alternative approach to
/// enabling features, consider installing the `cargo-feature` utility.
#[structopt(long = "features", number_of_values = 1)]
pub features: Option<Vec<String>>,
/// Set `default-features = false` for the added dependency.
#[structopt(long = "no-default-features")]
pub no_default_features: bool,
/// Do not print any output in case of success.
#[structopt(long = "quiet", short = "q")]
pub quiet: bool,
/// Run without accessing the network
#[structopt(long = "offline")]
pub offline: bool,
/// Keep dependencies sorted
#[structopt(long = "sort", short = "s")]
pub sort: bool,
/// Registry to use
#[structopt(long = "registry", conflicts_with = "git", conflicts_with = "path")]
pub registry: Option<String>,
}
fn parse_version_req(s: &str) -> Result<&str> {
semver::VersionReq::parse(s).chain_err(|| "Invalid dependency version requirement")?;
Ok(s)
}
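// For instance, parse_version_req("1.2.3") and parse_version_req("^0.4")
// pass validation, while parse_version_req("not-a-version") returns the
// chained "Invalid dependency version requirement" error.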
impl Args {
/// Get dependency section
pub fn get_section(&self) -> Vec<String> {
if self.dev {
vec!["dev-dependencies".to_owned()]
} else if self.build {
vec!["build-dependencies".to_owned()]
} else if let Some(ref target) = self.target {
if target.is_empty() {
panic!("Target specification may not be empty");
}
vec![
"target".to_owned(),
target.clone(),
"dependencies".to_owned(),
]
} else {
vec!["dependencies".to_owned()]
}
}
fn parse_single_dependency(&self, crate_name: &str) -> Result<Dependency> {
let crate_name = CrateName::new(crate_name);
if let Some(mut dependency) = crate_name.parse_as_version()? {
// crate specifier includes a version (e.g. `[email protected]`)
if let Some(ref url) = self.git {
let url = url.clone();
let version = dependency.version().unwrap().to_string();
return Err(ErrorKind::GitUrlWithVersion(url, version).into());
}
if let Some(ref path) = self.path {
dependency = dependency.set_path(path.to_str().unwrap());
}
Ok(dependency)
} else if crate_name.is_url_or_path() {
Ok(crate_name.parse_crate_name_from_uri()?)
} else {
assert_eq!(self.git.is_some() && self.vers.is_some(), false);
assert_eq!(self.git.is_some() && self.path.is_some(), false);
assert_eq!(self.git.is_some() && self.registry.is_some(), false);
assert_eq!(self.path.is_some() && self.registry.is_some(), false);
let mut dependency = Dependency::new(crate_name.name());
if let Some(repo) = &self.git {
dependency = dependency.set_git(repo, self.branch.clone());
}
if let Some(path) = &self.path {
dependency = dependency.set_path(path.to_str().unwrap());
}
if let Some(version) = &self.vers {
dependency = dependency.set_version(parse_version_req(version)?);
}
let registry_url = if let Some(registry) = &self.registry {
Some(registry_url(&find(&self.manifest_path)?, Some(registry))?)
} else {
None
};
if self.git.is_none() && self.path.is_none() && self.vers.is_none() {
let dep = get_latest_dependency(
crate_name.name(),
self.allow_prerelease,
&find(&self.manifest_path)?,
®istry_url,
)?;
let v = format!(
"{prefix}{version}",
prefix = self.get_upgrade_prefix(),
// If version is unavailable `get_latest_dependency` must have
// returned `Err(FetchVersionError::GetVersion)`
version = dep.version().unwrap_or_else(|| unreachable!())
);
dependency = dep.set_version(&v);
}
// Set the registry after getting the latest version as
// get_latest_dependency returns a registry-less Dependency
if let Some(registry) = &self.registry {
dependency = dependency.set_registry(registry);
}
Ok(dependency)
}
}
/// Build dependencies from arguments
pub fn parse_dependencies(&self) -> Result<Vec<Dependency>> {
if self.crates.len() > 1
&& (self.git.is_some() || self.path.is_some() || self.vers.is_some())
{
return Err(ErrorKind::MultipleCratesWithGitOrPathOrVers.into());
}
if self.crates.len() > 1 && self.rename.is_some() {
return Err(ErrorKind::MultipleCratesWithRename.into());
}
if self.crates.len() > 1 && self.features.is_some() {
return Err(ErrorKind::MultipleCratesWithFeatures.into());
}
self.crates
.iter()
.map(|crate_name| {
self.parse_single_dependency(crate_name).map(|x| {
let mut x = x
.set_optional(self.optional)
.set_features(self.features.clone())
.set_default_features(!self.no_default_features);
if let Some(ref rename) = self.rename {
x = x.set_rename(rename);
}
x
})
})
.collect()
}
fn get_upgrade_prefix(&self) -> &'static str {
match self.upgrade.as_ref() {
"default" => "",
"none" => "=",
"patch" => "~",
"minor" => "^",
"all" => ">=",
_ => unreachable!(),
}
}
}
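// A usage sketch (assumed wiring, not part of the dump): parse a
// `cargo add [email protected] --dev` invocation the way cargo hands it over.
// With an explicit version, parse_dependencies needs no network access.
#[allow(dead_code)]
fn sketch_add_dev_dependency() -> Result<Vec<Dependency>> {
use structopt::StructOpt;
let Command::Add(args) =
Command::from_iter(&["cargo", "add", "[email protected]", "--dev"]);
assert_eq!(args.get_section(), vec!["dev-dependencies".to_owned()]);
args.parse_dependencies()
}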
args.rs | 2.0.0'). By default, `cargo add` will use this format, as it is the one that the
crates.io registry suggests. One goal of `cargo add` is to prevent you from using wildcard
dependencies (version set to '*')."
)]
Add(Args),
}
#[derive(Debug, StructOpt)]
pub struct Args {
/// Crates to be added.
#[structopt(name = "crate", required = true)]
pub crates: Vec<String>,
/// Rename a dependency in Cargo.toml,
/// https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html#renaming-dependencies-in-cargotoml
/// Only works when specifying a single dependency.
#[structopt(long = "rename", short = "r")]
pub rename: Option<String>,
/// Add crate as development dependency.
#[structopt(long = "dev", short = "D", conflicts_with = "build")]
pub dev: bool,
/// Add crate as build dependency.
#[structopt(long = "build", short = "B", conflicts_with = "dev")]
pub build: bool,
/// Specify the version to grab from the registry(crates.io).
/// You can also specify version as part of name, e.g
/// `cargo add [email protected]`.
#[structopt(long = "vers", value_name = "uri", conflicts_with = "git")]
pub vers: Option<String>,
/// Specify a git repository to download the crate from.
#[structopt(
long = "git",
value_name = "uri",
conflicts_with = "vers",
conflicts_with = "path"
)]
pub git: Option<String>,
/// Specify a git branch to download the crate from.
#[structopt(
long = "branch",
value_name = "branch",
conflicts_with = "vers",
conflicts_with = "path"
)]
pub branch: Option<String>,
/// Specify the path the crate should be loaded from.
#[structopt(long = "path", conflicts_with = "git")]
pub path: Option<PathBuf>,
/// Add as dependency to the given target platform.
#[structopt(long = "target", conflicts_with = "dev", conflicts_with = "build")]
pub target: Option<String>,
/// Add as an optional dependency (for use in features).
#[structopt(long = "optional", conflicts_with = "dev", conflicts_with = "build")]
pub optional: bool,
/// Path to the manifest to add a dependency to.
#[structopt(long = "manifest-path", value_name = "path", conflicts_with = "pkgid")]
pub manifest_path: Option<PathBuf>,
/// Package id of the crate to add this dependency to.
#[structopt(
long = "package",
short = "p",
value_name = "pkgid",
conflicts_with = "path"
)]
pub pkgid: Option<String>,
/// Choose method of semantic version upgrade. Must be one of "none" (exact version, `=`
/// modifier), "patch" (`~` modifier), "minor" (`^` modifier), "all" (`>=`), or "default" (no
/// modifier).
#[structopt(
long = "upgrade",
value_name = "method",
possible_value = "none",
possible_value = "patch",
possible_value = "minor",
possible_value = "all",
possible_value = "default",
default_value = "default"
)]
pub upgrade: String,
/// Include prerelease versions when fetching from crates.io (e.g.
/// '0.6.0-alpha').
#[structopt(long = "allow-prerelease")]
pub allow_prerelease: bool,
/// Space-separated list of features to add. For an alternative approach to
/// enabling features, consider installing the `cargo-feature` utility.
#[structopt(long = "features", number_of_values = 1)]
pub features: Option<Vec<String>>,
/// Set `default-features = false` for the added dependency.
#[structopt(long = "no-default-features")]
pub no_default_features: bool,
/// Do not print any output in case of success.
#[structopt(long = "quiet", short = "q")]
pub quiet: bool,
/// Run without accessing the network
#[structopt(long = "offline")]
pub offline: bool,
/// Keep dependencies sorted
#[structopt(long = "sort", short = "s")]
pub sort: bool,
/// Registry to use
#[structopt(long = "registry", conflicts_with = "git", conflicts_with = "path")]
pub registry: Option<String>,
}
fn parse_version_req(s: &str) -> Result<&str> {
semver::VersionReq::parse(s).chain_err(|| "Invalid dependency version requirement")?;
Ok(s)
}
impl Args {
/// Get dependency section
pub fn get_section(&self) -> Vec<String> {
if self.dev {
vec!["dev-dependencies".to_owned()]
} else if self.build {
vec!["build-dependencies".to_owned()]
} else if let Some(ref target) = self.target {
if target.is_empty() {
panic!("Target specification may not be empty");
}
vec![
"target".to_owned(),
target.clone(),
"dependencies".to_owned(),
]
} else {
vec!["dependencies".to_owned()]
}
}
fn parse_single_dependency(&self, crate_name: &str) -> Result<Dependency> {
let crate_name = CrateName::new(crate_name);
if let Some(mut dependency) = crate_name.parse_as_version()? {
// crate specifier includes a version (e.g. `[email protected]`)
if let Some(ref url) = self.git {
let url = url.clone();
let version = dependency.version().unwrap().to_string();
return Err(ErrorKind::GitUrlWithVersion(url, version).into());
}
if let Some(ref path) = self.path {
dependency = dependency.set_path(path.to_str().unwrap());
}
Ok(dependency)
} else if crate_name.is_url_or_path() {
Ok(crate_name.parse_crate_name_from_uri()?)
} else | None
};
if self.git.is_none() && self.path.is_none() && self.vers.is_none() {
let dep = get_latest_dependency(
crate_name.name(),
self.allow_prerelease,
&find(&self.manifest_path)?,
®istry_url,
)?;
let v = format!(
"{prefix}{version}",
prefix = self.get_upgrade_prefix(),
// If version is unavailable `get_latest_dependency` must have
// returned `Err(FetchVersionError::GetVersion)`
version = dep.version().unwrap_or_else(|| unreachable!())
);
dependency = dep.set_version(&v);
}
// Set the registry after getting the latest version as
// get_latest_dependency returns a registry-less Dependency
if let Some(registry) = &self.registry {
dependency = dependency.set_registry(registry);
}
Ok(dependency)
}
}
/// Build dependencies from arguments
pub fn parse_dependencies(&self) -> Result<Vec<Dependency>> {
if self.crates.len() > 1
&& (self.git.is_some() || self.path.is_some() || self.vers.is_some())
{
return Err(ErrorKind::MultipleCratesWithGitOrPathOrVers.into());
}
if self.crates.len() > 1 && self.rename.is_some() {
return Err(ErrorKind::MultipleCratesWithRename.into());
}
if self.crates.len() > 1 && self.features.is_some() {
return Err(ErrorKind::MultipleCratesWithFeatures.into());
}
self.crates
.iter()
.map(|crate_name| {
self.parse_single_dependency(crate_name).map(|x| {
let mut x = x
.set_optional(self.optional)
.set_features(self.features.clone())
.set_default_features(!self.no_default_features);
if let Some(ref rename) = self.rename {
x = x.set_rename(rename);
}
x
})
})
.collect()
}
fn get_upgrade_prefix(&self) -> &'static str {
match self.upgrade.as_ref() {
"default" => "",
"none" => "=",
"patch" => "~",
"minor" => "^",
"all" => ">=",
_ => unreachable! | {
assert_eq!(self.git.is_some() && self.vers.is_some(), false);
assert_eq!(self.git.is_some() && self.path.is_some(), false);
assert_eq!(self.git.is_some() && self.registry.is_some(), false);
assert_eq!(self.path.is_some() && self.registry.is_some(), false);
let mut dependency = Dependency::new(crate_name.name());
if let Some(repo) = &self.git {
dependency = dependency.set_git(repo, self.branch.clone());
}
if let Some(path) = &self.path {
dependency = dependency.set_path(path.to_str().unwrap());
}
if let Some(version) = &self.vers {
dependency = dependency.set_version(parse_version_req(version)?);
}
let registry_url = if let Some(registry) = &self.registry {
Some(registry_url(&find(&self.manifest_path)?, Some(registry))?)
} else { | conditional_block |
args.rs | 2.0.0'). By default, `cargo add` will use this format, as it is the one that the
crates.io registry suggests. One goal of `cargo add` is to prevent you from using wildcard
dependencies (version set to '*')."
)]
Add(Args),
}
#[derive(Debug, StructOpt)]
pub struct Args {
/// Crates to be added.
#[structopt(name = "crate", required = true)]
pub crates: Vec<String>,
/// Rename a dependency in Cargo.toml,
/// https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html#renaming-dependencies-in-cargotoml
/// Only works when specifying a single dependency.
#[structopt(long = "rename", short = "r")]
pub rename: Option<String>,
/// Add crate as development dependency.
#[structopt(long = "dev", short = "D", conflicts_with = "build")]
pub dev: bool,
/// Add crate as build dependency.
#[structopt(long = "build", short = "B", conflicts_with = "dev")]
pub build: bool,
/// Specify the version to grab from the registry(crates.io).
/// You can also specify version as part of name, e.g
/// `cargo add [email protected]`.
#[structopt(long = "vers", value_name = "uri", conflicts_with = "git")]
pub vers: Option<String>,
/// Specify a git repository to download the crate from.
#[structopt(
long = "git",
value_name = "uri",
conflicts_with = "vers",
conflicts_with = "path"
)]
pub git: Option<String>,
/// Specify a git branch to download the crate from.
#[structopt(
long = "branch",
value_name = "branch",
conflicts_with = "vers",
conflicts_with = "path"
)]
pub branch: Option<String>,
/// Specify the path the crate should be loaded from.
#[structopt(long = "path", conflicts_with = "git")]
pub path: Option<PathBuf>,
/// Add as dependency to the given target platform.
#[structopt(long = "target", conflicts_with = "dev", conflicts_with = "build")]
pub target: Option<String>,
/// Add as an optional dependency (for use in features).
#[structopt(long = "optional", conflicts_with = "dev", conflicts_with = "build")]
pub optional: bool,
/// Path to the manifest to add a dependency to.
#[structopt(long = "manifest-path", value_name = "path", conflicts_with = "pkgid")]
pub manifest_path: Option<PathBuf>,
/// Package id of the crate to add this dependency to.
#[structopt(
long = "package",
short = "p",
value_name = "pkgid",
conflicts_with = "path"
)]
pub pkgid: Option<String>,
/// Choose method of semantic version upgrade. Must be one of "none" (exact version, `=`
/// modifier), "patch" (`~` modifier), "minor" (`^` modifier), "all" (`>=`), or "default" (no
/// modifier).
#[structopt(
long = "upgrade",
value_name = "method",
possible_value = "none",
possible_value = "patch",
possible_value = "minor",
possible_value = "all",
possible_value = "default",
default_value = "default"
)]
pub upgrade: String,
/// Include prerelease versions when fetching from crates.io (e.g.
/// '0.6.0-alpha').
#[structopt(long = "allow-prerelease")]
pub allow_prerelease: bool,
/// Space-separated list of features to add. For an alternative approach to
/// enabling features, consider installing the `cargo-feature` utility.
#[structopt(long = "features", number_of_values = 1)]
pub features: Option<Vec<String>>,
/// Set `default-features = false` for the added dependency.
#[structopt(long = "no-default-features")]
pub no_default_features: bool,
/// Do not print any output in case of success.
#[structopt(long = "quiet", short = "q")]
pub quiet: bool,
/// Run without accessing the network
#[structopt(long = "offline")]
pub offline: bool,
/// Keep dependencies sorted
#[structopt(long = "sort", short = "s")]
pub sort: bool,
/// Registry to use
#[structopt(long = "registry", conflicts_with = "git", conflicts_with = "path")]
pub registry: Option<String>,
}
fn parse_version_req(s: &str) -> Result<&str> {
semver::VersionReq::parse(s).chain_err(|| "Invalid dependency version requirement")?;
Ok(s)
}
impl Args {
/// Get dependency section
pub fn get_section(&self) -> Vec<String> {
if self.dev {
vec!["dev-dependencies".to_owned()]
} else if self.build {
vec!["build-dependencies".to_owned()]
} else if let Some(ref target) = self.target {
if target.is_empty() {
panic!("Target specification may not be empty");
}
vec![
"target".to_owned(),
target.clone(),
"dependencies".to_owned(),
]
} else {
vec!["dependencies".to_owned()]
}
}
fn parse_single_dependency(&self, crate_name: &str) -> Result<Dependency> {
let crate_name = CrateName::new(crate_name);
if let Some(mut dependency) = crate_name.parse_as_version()? {
// crate specifier includes a version (e.g. `[email protected]`)
if let Some(ref url) = self.git {
let url = url.clone();
let version = dependency.version().unwrap().to_string();
return Err(ErrorKind::GitUrlWithVersion(url, version).into());
}
if let Some(ref path) = self.path {
dependency = dependency.set_path(path.to_str().unwrap());
}
Ok(dependency)
} else if crate_name.is_url_or_path() {
Ok(crate_name.parse_crate_name_from_uri()?)
} else {
assert_eq!(self.git.is_some() && self.vers.is_some(), false);
assert_eq!(self.git.is_some() && self.path.is_some(), false);
assert_eq!(self.git.is_some() && self.registry.is_some(), false);
assert_eq!(self.path.is_some() && self.registry.is_some(), false);
let mut dependency = Dependency::new(crate_name.name());
if let Some(repo) = &self.git {
dependency = dependency.set_git(repo, self.branch.clone());
}
if let Some(path) = &self.path {
dependency = dependency.set_path(path.to_str().unwrap());
}
if let Some(version) = &self.vers {
dependency = dependency.set_version(parse_version_req(version)?);
}
let registry_url = if let Some(registry) = &self.registry {
Some(registry_url(&find(&self.manifest_path)?, Some(registry))?)
} else {
None
};
if self.git.is_none() && self.path.is_none() && self.vers.is_none() {
let dep = get_latest_dependency(
crate_name.name(),
self.allow_prerelease,
&find(&self.manifest_path)?,
®istry_url,
)?;
let v = format!(
"{prefix}{version}",
prefix = self.get_upgrade_prefix(),
// If version is unavailable `get_latest_dependency` must have
// returned `Err(FetchVersionError::GetVersion)`
version = dep.version().unwrap_or_else(|| unreachable!())
);
dependency = dep.set_version(&v);
}
// Set the registry after getting the latest version as
// get_latest_dependency returns a registry-less Dependency
if let Some(registry) = &self.registry {
dependency = dependency.set_registry(registry);
}
Ok(dependency)
}
}
/// Build dependencies from arguments
pub fn parse_dependencies(&self) -> Result<Vec<Dependency>> | .set_optional(self.optional)
.set_features(self.features.clone())
.set_default_features(!self.no_default_features);
if let Some(ref rename) = self.rename {
x = x.set_rename(rename);
}
x
})
})
.collect()
}
fn get_upgrade_prefix(&self) -> &'static str {
match self.upgrade.as_ref() {
"default" => "",
"none" => "=",
"patch" => "~",
"minor" => "^",
"all" => ">=",
_ => unreachable!(),
| {
if self.crates.len() > 1
&& (self.git.is_some() || self.path.is_some() || self.vers.is_some())
{
return Err(ErrorKind::MultipleCratesWithGitOrPathOrVers.into());
}
if self.crates.len() > 1 && self.rename.is_some() {
return Err(ErrorKind::MultipleCratesWithRename.into());
}
if self.crates.len() > 1 && self.features.is_some() {
return Err(ErrorKind::MultipleCratesWithFeatures.into());
}
self.crates
.iter()
.map(|crate_name| {
self.parse_single_dependency(crate_name).map(|x| {
let mut x = x | identifier_body |
args.rs | 2.0.0'). By default, `cargo add` will use this format, as it is the one that the
crates.io registry suggests. One goal of `cargo add` is to prevent you from using wildcard
dependencies (version set to '*')."
)]
Add(Args),
}
#[derive(Debug, StructOpt)]
pub struct Args {
/// Crates to be added.
#[structopt(name = "crate", required = true)]
pub crates: Vec<String>,
/// Rename a dependency in Cargo.toml,
/// https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html#renaming-dependencies-in-cargotoml
/// Only works when specifying a single dependency.
#[structopt(long = "rename", short = "r")]
pub rename: Option<String>,
/// Add crate as development dependency.
#[structopt(long = "dev", short = "D", conflicts_with = "build")]
pub dev: bool,
/// Add crate as build dependency.
#[structopt(long = "build", short = "B", conflicts_with = "dev")]
pub build: bool,
/// Specify the version to grab from the registry(crates.io).
/// You can also specify version as part of name, e.g
/// `cargo add [email protected]`.
#[structopt(long = "vers", value_name = "uri", conflicts_with = "git")]
pub vers: Option<String>,
/// Specify a git repository to download the crate from.
#[structopt(
long = "git",
value_name = "uri",
conflicts_with = "vers",
conflicts_with = "path"
)]
pub git: Option<String>,
/// Specify a git branch to download the crate from.
#[structopt(
long = "branch",
value_name = "branch",
conflicts_with = "vers",
conflicts_with = "path"
)]
pub branch: Option<String>,
/// Specify the path the crate should be loaded from.
#[structopt(long = "path", conflicts_with = "git")]
pub path: Option<PathBuf>, | #[structopt(long = "target", conflicts_with = "dev", conflicts_with = "build")]
pub target: Option<String>,
/// Add as an optional dependency (for use in features).
#[structopt(long = "optional", conflicts_with = "dev", conflicts_with = "build")]
pub optional: bool,
/// Path to the manifest to add a dependency to.
#[structopt(long = "manifest-path", value_name = "path", conflicts_with = "pkgid")]
pub manifest_path: Option<PathBuf>,
/// Package id of the crate to add this dependency to.
#[structopt(
long = "package",
short = "p",
value_name = "pkgid",
conflicts_with = "path"
)]
pub pkgid: Option<String>,
/// Choose method of semantic version upgrade. Must be one of "none" (exact version, `=`
/// modifier), "patch" (`~` modifier), "minor" (`^` modifier), "all" (`>=`), or "default" (no
/// modifier).
#[structopt(
long = "upgrade",
value_name = "method",
possible_value = "none",
possible_value = "patch",
possible_value = "minor",
possible_value = "all",
possible_value = "default",
default_value = "default"
)]
pub upgrade: String,
/// Include prerelease versions when fetching from crates.io (e.g.
/// '0.6.0-alpha').
#[structopt(long = "allow-prerelease")]
pub allow_prerelease: bool,
/// Space-separated list of features to add. For an alternative approach to
/// enabling features, consider installing the `cargo-feature` utility.
#[structopt(long = "features", number_of_values = 1)]
pub features: Option<Vec<String>>,
/// Set `default-features = false` for the added dependency.
#[structopt(long = "no-default-features")]
pub no_default_features: bool,
/// Do not print any output in case of success.
#[structopt(long = "quiet", short = "q")]
pub quiet: bool,
/// Run without accessing the network
#[structopt(long = "offline")]
pub offline: bool,
/// Keep dependencies sorted
#[structopt(long = "sort", short = "s")]
pub sort: bool,
/// Registry to use
#[structopt(long = "registry", conflicts_with = "git", conflicts_with = "path")]
pub registry: Option<String>,
}
fn parse_version_req(s: &str) -> Result<&str> {
semver::VersionReq::parse(s).chain_err(|| "Invalid dependency version requirement")?;
Ok(s)
}
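// Added illustration (not part of the original source): `parse_version_req`
// accepts anything `semver::VersionReq` can parse and returns the input
// string unchanged, e.g.:
//
//     assert!(parse_version_req("0.4.2").is_ok());
//     assert!(parse_version_req("~1.2").is_ok());
//     assert!(parse_version_req("not-a-version").is_err());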
impl Args {
/// Get dependency section
pub fn get_section(&self) -> Vec<String> {
if self.dev {
vec!["dev-dependencies".to_owned()]
} else if self.build {
vec!["build-dependencies".to_owned()]
} else if let Some(ref target) = self.target {
if target.is_empty() {
panic!("Target specification may not be empty");
}
vec![
"target".to_owned(),
target.clone(),
"dependencies".to_owned(),
]
} else {
vec!["dependencies".to_owned()]
}
}
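// Added illustration (assumed mapping, derived from the branches above):
//   --dev             -> ["dev-dependencies"]
//   --build           -> ["build-dependencies"]
//   --target mips64   -> ["target", "mips64", "dependencies"]
//   (no section flag) -> ["dependencies"]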
fn parse_single_dependency(&self, crate_name: &str) -> Result<Dependency> {
let crate_name = CrateName::new(crate_name);
if let Some(mut dependency) = crate_name.parse_as_version()? {
// crate specifier includes a version (e.g. `[email protected]`)
if let Some(ref url) = self.git {
let url = url.clone();
let version = dependency.version().unwrap().to_string();
return Err(ErrorKind::GitUrlWithVersion(url, version).into());
}
if let Some(ref path) = self.path {
dependency = dependency.set_path(path.to_str().unwrap());
}
Ok(dependency)
} else if crate_name.is_url_or_path() {
Ok(crate_name.parse_crate_name_from_uri()?)
} else {
assert!(!(self.git.is_some() && self.vers.is_some()));
assert!(!(self.git.is_some() && self.path.is_some()));
assert!(!(self.git.is_some() && self.registry.is_some()));
assert!(!(self.path.is_some() && self.registry.is_some()));
let mut dependency = Dependency::new(crate_name.name());
if let Some(repo) = &self.git {
dependency = dependency.set_git(repo, self.branch.clone());
}
if let Some(path) = &self.path {
dependency = dependency.set_path(path.to_str().unwrap());
}
if let Some(version) = &self.vers {
dependency = dependency.set_version(parse_version_req(version)?);
}
let registry_url = if let Some(registry) = &self.registry {
Some(registry_url(&find(&self.manifest_path)?, Some(registry))?)
} else {
None
};
if self.git.is_none() && self.path.is_none() && self.vers.is_none() {
let dep = get_latest_dependency(
crate_name.name(),
self.allow_prerelease,
&find(&self.manifest_path)?,
&registry_url,
)?;
let v = format!(
"{prefix}{version}",
prefix = self.get_upgrade_prefix(),
// If version is unavailable `get_latest_dependency` must have
// returned `Err(FetchVersionError::GetVersion)`
version = dep.version().unwrap_or_else(|| unreachable!())
);
dependency = dep.set_version(&v);
}
// Set the registry after getting the latest version as
// get_latest_dependency returns a registry-less Dependency
if let Some(registry) = &self.registry {
dependency = dependency.set_registry(registry);
}
Ok(dependency)
}
}
/// Build dependencies from arguments
pub fn parse_dependencies(&self) -> Result<Vec<Dependency>> {
if self.crates.len() > 1
&& (self.git.is_some() || self.path.is_some() || self.vers.is_some())
{
return Err(ErrorKind::MultipleCratesWithGitOrPathOrVers.into());
}
if self.crates.len() > 1 && self.rename.is_some() {
return Err(ErrorKind::MultipleCratesWithRename.into());
}
if self.crates.len() > 1 && self.features.is_some() {
return Err(ErrorKind::MultipleCratesWithFeatures.into());
}
self.crates
.iter()
.map(|crate_name| {
self.parse_single_dependency(crate_name).map(|x| {
let mut x = x
.set_optional(self.optional)
.set_features(self.features.clone())
.set_default_features(!self.no_default_features);
if let Some(ref rename) = self.rename {
x = x.set_rename(rename);
}
x
})
})
.collect()
}
fn get_upgrade_prefix(&self) -> &'static str {
match self.upgrade.as_ref() {
"default" => "",
"none" => "=",
"patch" => "~",
"minor" => "^",
"all" => ">=",
_ => unreachable!(),
|
/// Add as dependency to the given target platform. | random_line_split |
DOMHelper.ts | E extends never
? O extends never
? (Element | null)
: ReturnType<O> | null
: O extends never
? (ReturnType<E> | null)
: ReturnType<O> | ReturnType<E> {
const element = document.querySelector(selector)
if (element) {
return existCallback ? existCallback(element as EE) : element
}
return otherwise ? otherwise() : null
}
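// Added illustration (hypothetical selectors, not part of the original
// source) of the three ways `$` can be called:
//   $('.some-selector')                          // -> Element | null
//   $('.some-selector', el => el.textContent)    // -> string | null
//   $('.some-selector', undefined, () => 'n/a')  // -> Element | 'n/a'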
function isInCodePage() {
const branchListSelector = '#branch-select-menu.branch-select-menu'
return Boolean($(branchListSelector))
}
function getBranches() {
const branchSelector = '.branch-select-menu .select-menu-list > div .select-menu-item-text'
const branchElements = Array.from(document.querySelectorAll(branchSelector))
return branchElements.map(element => element.innerHTML.trim())
}
function getCurrentBranch() {
const selectedBranchButtonSelector = '.repository-content .branch-select-menu summary'
const branchButtonElement: HTMLElement = $(selectedBranchButtonSelector)
if (branchButtonElement) {
const branchNameSpanElement = branchButtonElement.querySelector('span')
if (branchNameSpanElement) {
const partialBranchNameFromInnerText = branchNameSpanElement.innerText
if (!partialBranchNameFromInnerText.includes('…')) return partialBranchNameFromInnerText
}
const defaultTitle = 'Switch branches or tags'
const title = branchButtonElement.title.trim()
if (title !== defaultTitle && !title.includes(' ')) return title
}
const findFileButtonSelector =
'#js-repo-pjax-container .repository-content .file-navigation a[data-hotkey="t"]'
const urlFromFindFileButton: string | undefined = $(
findFileButtonSelector,
element => (element as HTMLAnchorElement).href,
)
if (urlFromFindFileButton) {
const commitPathRegex = /^(.*?)\/(.*?)\/find\/(.*?)$/
const result = urlFromFindFileButton.match(commitPathRegex)
if (result) {
const [_, userName, repoName, branchName] = result
if (!branchName.includes(' ')) return branchName
}
}
raiseError(new Error('cannot get current branch'))
}
/**
* add the logo element into DOM
*
*/
function insertLogoMountPoint() {
const logoSelector = '.gitako .gitako-logo'
return $(logoSelector) || createLogoMountPoint()
}
function createLogoMountPoint() {
const logoMountElement = document.createElement('div')
logoMountElement.setAttribute('class', 'gitako-logo-mount-point')
document.body.appendChild(logoMountElement)
return logoMountElement
}
/**
* content above the file navigation bar is the same for all pages of the repo;
* use this function to scroll down a bit to hide it
*/
function scrollToRepoContent() {
const repositoryContentSelector = '.repository-content'
// do NOT use behavior: smooth here as it will scroll horizontally
$(repositoryContentSelector, repositoryContentElement =>
repositoryContentElement.scrollIntoView(),
)
}
const pjax = new PJAX({
elements: '.pjax-link',
selectors: ['.repository-content', 'title'],
scrollTo: false,
analytics: false,
cacheBust: false,
forceCache: true, // TODO: merge namespace, add forceCache
})
function loadWithPJAX(URL: string) {
NProgress.start()
pjax.loadUrl(URL, { scrollTo: 0 })
}
/**
* there are a few types of pages on GitHub, mainly
* 1. raw text: code
* 2. rendered content: like Markdown
* 3. preview: like image
*/
const PAGE_TYPES = {
RAW_TEXT: 'raw_text',
RENDERED: 'rendered',
// PREVIEW: 'preview',
OTHERS: 'others',
}
/**
* this function tries to tell which type the current page is
*
* note: we do not determine the type from the file extension here
* because some files may use a wrong extension name
*
* TODO: distinguish type 'preview'
*/
function getCurrentPageType() {
const blobWrapperSelector = '.repository-content .file .blob-wrapper table'
const readmeSelector = '.repository-content .readme'
return (
$(blobWrapperSelector, () => PAGE_TYPES.RAW_TEXT) ||
$(readmeSelector, () => PAGE_TYPES.RENDERED) ||
PAGE_TYPES.OTHERS
)
}
export const REPO_TYPE_PRIVATE = 'private'
export const REPO_TYPE_PUBLIC = 'public'
function getRepoPageType() {
const headerSelector = `#js-repo-pjax-container .pagehead.repohead h1`
return $(headerSelector, header => {
const repoPageTypes = [REPO_TYPE_PRIVATE, REPO_TYPE_PUBLIC]
for (const repoPageType of repoPageTypes) {
if (header.classList.contains(repoPageType)) {
return repoPageType
}
}
raiseError(new Error('cannot get repo page type'))
})
}
/**
* add copy-file-content buttons to button groups;
* clicking these buttons copies the file content to the clipboard
*/
function attachCopyFileBtn() {
/**
* get the element containing the raw text content
*/
function getCodeElement() {
if (getCurrentPageType() === PAGE_TYPES.RAW_TEXT) {
const codeContentSelector = '.repository-content .file .data table'
const codeContentElement = $(codeContentSelector)
if (!codeContentElement) {
raiseError(new Error('cannot find code content element'))
}
return codeContentElement
}
}
| * change inner text of copy file button to give feedback
* @param {element} copyFileBtn
* @param {string} text
*/
function setTempCopyFileBtnText(copyFileBtn: HTMLButtonElement, text: string) {
copyFileBtn.innerText = text
window.setTimeout(() => (copyFileBtn.innerText = 'Copy file'), 1000)
}
if (getCurrentPageType() === PAGE_TYPES.RAW_TEXT) {
const btnGroupSelector = [
// the button group next to navigation bar
'.repository-content .file-navigation.js-zeroclipboard-container .BtnGroup',
// the button group in file content header
'.repository-content .file .file-header .file-actions .BtnGroup',
].join(', ')
const btnGroups = document.querySelectorAll(btnGroupSelector)
btnGroups.forEach(btnGroup => {
const copyFileBtn = document.createElement('button')
copyFileBtn.classList.add('btn', 'btn-sm', 'BtnGroup-item', 'copy-file-btn')
copyFileBtn.innerText = 'Copy file'
copyFileBtn.addEventListener('click', () => {
const codeElement = getCodeElement()
if (codeElement) {
if (copyElementContent(codeElement)) {
setTempCopyFileBtnText(copyFileBtn, 'Success!')
} else {
setTempCopyFileBtnText(copyFileBtn, 'Copy failed!')
}
}
})
btnGroup.insertBefore(copyFileBtn, btnGroup.lastChild)
})
}
}
/**
* copy content of a DOM element to clipboard
* @param {element} element
* @returns {boolean} whether copy is successful
*/
function copyElementContent(element: Element) {
let selection = window.getSelection()
if (selection) selection.removeAllRanges()
const range = document.createRange()
range.selectNode(element)
selection = window.getSelection()
if (selection) selection.addRange(range)
const isCopySuccessful = document.execCommand('copy')
selection = window.getSelection()
if (selection) selection.removeAllRanges()
return isCopySuccessful
}
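// Added note (commentary, not part of the original source):
// document.execCommand('copy') is deprecated in modern browsers; an
// alternative sketch would be the async Clipboard API, e.g.
//   navigator.clipboard.writeText(element.textContent ?? '')
// which requires a secure context and a user gesture.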
/**
* create a copy file content button `clippy`
* once the mouse enters a Markdown code snippet, move clippy into it;
* the user can copy the snippet's content by clicking it
*
* TODO: 'reactify' it
*/
function createClippy() {
function setTempClippyIconFeedback(clippy: Element, type: 'success' | 'fail') {
const tempIconClassName = type === 'success' ? 'success' : 'fail'
clippy.classList.add(tempIconClassName)
window.setTimeout(() => {
clippy.classList.remove(tempIconClassName)
}, 1000)
}
/**
* <div class="clippy-wrapper">
* <button class="clippy">
* <i class="octicon octicon-clippy" />
* </button>
* </div>
*/
const clippyWrapper = document.createElement('div')
clippyWrapper.classList.add('clippy-wrapper')
const clippy = document.createElement('button')
clippy.classList.add('clippy')
const clippyIcon = document.createElement('i')
clippyIcon.classList.add('icon')
clippyWrapper.appendChild(clippy)
clippy.appendChild(clippyIcon)
// set clipboard with current code snippet element's content
clippy.addEventListener('click', function onClippyClick() {
if (copyElementContent(currentCodeSnippetElement)) {
setTempClippyIconFeedback(clippy, 'success')
} else {
setTempClippyIconFeedback(clippy, 'fail')
}
})
return clippyWrapper
}
const clippy = createClippy()
let currentCodeSnippetElement: Element
function attachCopySnippet() {
const readmeSelector = '.repository-content div#readme'
return $(readmeSelector, () => {
const readmeArticleSelector = '.repository-content div#readme article'
$(
readmeArticleSelector,
readmeElement =>
readmeElement.addEventListener('mouseover', e => {
| /** | random_line_split |
DOMHelper.ts | = '.branch-select-menu .select-menu-list > div .select-menu-item-text'
const branchElements = Array.from(document.querySelectorAll(branchSelector))
return branchElements.map(element => element.innerHTML.trim())
}
function getCurrentBranch() {
const selectedBranchButtonSelector = '.repository-content .branch-select-menu summary'
const branchButtonElement: HTMLElement = $(selectedBranchButtonSelector)
if (branchButtonElement) {
const branchNameSpanElement = branchButtonElement.querySelector('span')
if (branchNameSpanElement) {
const partialBranchNameFromInnerText = branchNameSpanElement.innerText
if (!partialBranchNameFromInnerText.includes('…')) return partialBranchNameFromInnerText
}
const defaultTitle = 'Switch branches or tags'
const title = branchButtonElement.title.trim()
if (title !== defaultTitle && !title.includes(' ')) return title
}
const findFileButtonSelector =
'#js-repo-pjax-container .repository-content .file-navigation a[data-hotkey="t"]'
const urlFromFindFileButton: string | undefined = $(
findFileButtonSelector,
element => (element as HTMLAnchorElement).href,
)
if (urlFromFindFileButton) {
const commitPathRegex = /^(.*?)\/(.*?)\/find\/(.*?)$/
const result = urlFromFindFileButton.match(commitPathRegex)
if (result) {
const [_, userName, repoName, branchName] = result
if (!branchName.includes(' ')) return branchName
}
}
raiseError(new Error('cannot get current branch'))
}
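// Added illustration (hypothetical URL, not part of the original source):
// for a find-file URL such as
// 'https://github.com/user/repo/find/feature-branch', the last capture
// group of commitPathRegex yields branchName = 'feature-branch' -- the only
// capture this function actually uses.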
/**
* add the logo element into DOM
*
*/
function insertLogoMountPoint() {
const logoSelector = '.gitako .gitako-logo'
return $(logoSelector) || createLogoMountPoint()
}
function createLogoMountPoint() {
const logoMountElement = document.createElement('div')
logoMountElement.setAttribute('class', 'gitako-logo-mount-point')
document.body.appendChild(logoMountElement)
return logoMountElement
}
/**
* content above the file navigation bar is the same for all pages of the repo;
* use this function to scroll down a bit to hide it
*/
function scrollToRepoContent() {
const repositoryContentSelector = '.repository-content'
// do NOT use behavior: smooth here as it will scroll horizontally
$(repositoryContentSelector, repositoryContentElement =>
repositoryContentElement.scrollIntoView(),
)
}
const pjax = new PJAX({
elements: '.pjax-link',
selectors: ['.repository-content', 'title'],
scrollTo: false,
analytics: false,
cacheBust: false,
forceCache: true, // TODO: merge namespace, add forceCache
})
function loadWithPJAX(URL: string) {
NProgress.start()
pjax.loadUrl(URL, { scrollTo: 0 })
}
/**
* there are a few types of pages on GitHub, mainly
* 1. raw text: code
* 2. rendered content: like Markdown
* 3. preview: like image
*/
const PAGE_TYPES = {
RAW_TEXT: 'raw_text',
RENDERED: 'rendered',
// PREVIEW: 'preview',
OTHERS: 'others',
}
/**
* this function tries to tell which type the current page is
*
* note: we do not determine the type from the file extension here
* because some files may use a wrong extension name
*
* TODO: distinguish type 'preview'
*/
function getCurrentPageType() {
const blobWrapperSelector = '.repository-content .file .blob-wrapper table'
const readmeSelector = '.repository-content .readme'
return (
$(blobWrapperSelector, () => PAGE_TYPES.RAW_TEXT) ||
$(readmeSelector, () => PAGE_TYPES.RENDERED) ||
PAGE_TYPES.OTHERS
)
}
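// Added illustration (assumed outcomes, derived from the selectors above):
// a code blob view resolves to PAGE_TYPES.RAW_TEXT, a rendered README to
// PAGE_TYPES.RENDERED, and any other page to PAGE_TYPES.OTHERS.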
export const REPO_TYPE_PRIVATE = 'private'
export const REPO_TYPE_PUBLIC = 'public'
function getRepoPageType() {
const headerSelector = `#js-repo-pjax-container .pagehead.repohead h1`
return $(headerSelector, header => {
const repoPageTypes = [REPO_TYPE_PRIVATE, REPO_TYPE_PUBLIC]
for (const repoPageType of repoPageTypes) {
if (header.classList.contains(repoPageType)) {
return repoPageType
}
}
raiseError(new Error('cannot get repo page type'))
})
}
/**
* add copy-file-content buttons to button groups;
* clicking these buttons copies the file content to the clipboard
*/
function attachCopyFileBtn() {
/**
* get the element containing the raw text content
*/
function getCodeElement() {
if (getCurrentPageType() === PAGE_TYPES.RAW_TEXT) {
const codeContentSelector = '.repository-content .file .data table'
const codeContentElement = $(codeContentSelector)
if (!codeContentElement) {
raiseError(new Error('cannot find code content element'))
}
return codeContentElement
}
}
/**
* change inner text of copy file button to give feedback
* @param {element} copyFileBtn
* @param {string} text
*/
function setTempCopyFileBtnText(copyFileBtn: HTMLButtonElement, text: string) {
copyFileBtn.innerText = text
window.setTimeout(() => (copyFileBtn.innerText = 'Copy file'), 1000)
}
if (getCurrentPageType() === PAGE_TYPES.RAW_TEXT) {
const btnGroupSelector = [
// the button group next to navigation bar
'.repository-content .file-navigation.js-zeroclipboard-container .BtnGroup',
// the button group in file content header
'.repository-content .file .file-header .file-actions .BtnGroup',
].join(', ')
const btnGroups = document.querySelectorAll(btnGroupSelector)
btnGroups.forEach(btnGroup => {
const copyFileBtn = document.createElement('button')
copyFileBtn.classList.add('btn', 'btn-sm', 'BtnGroup-item', 'copy-file-btn')
copyFileBtn.innerText = 'Copy file'
copyFileBtn.addEventListener('click', () => {
const codeElement = getCodeElement()
if (codeElement) {
if (copyElementContent(codeElement)) {
setTempCopyFileBtnText(copyFileBtn, 'Success!')
} else {
setTempCopyFileBtnText(copyFileBtn, 'Copy failed!')
}
}
})
btnGroup.insertBefore(copyFileBtn, btnGroup.lastChild)
})
}
}
/**
* copy content of a DOM element to clipboard
* @param {element} element
* @returns {boolean} whether copy is successful
*/
function copyElementContent(element: Element) {
let selection = window.getSelection()
if (selection) selection.removeAllRanges()
const range = document.createRange()
range.selectNode(element)
selection = window.getSelection()
if (selection) selection.addRange(range)
const isCopySuccessful = document.execCommand('copy')
selection = window.getSelection()
if (selection) selection.removeAllRanges()
return isCopySuccessful
}
/**
* create a copy file content button `clippy`
* once the mouse enters a Markdown code snippet, move clippy into it;
* the user can copy the snippet's content by clicking it
*
* TODO: 'reactify' it
*/
function createClippy() {
function setTempClippyIconFeedback(clippy: Element, type: 'success' | 'fail') {
const tempIconClassName = type === 'success' ? 'success' : 'fail'
clippy.classList.add(tempIconClassName)
window.setTimeout(() => {
clippy.classList.remove(tempIconClassName)
}, 1000)
}
/**
* <div class="clippy-wrapper">
* <button class="clippy">
* <i class="octicon octicon-clippy" />
* </button>
* </div>
*/
const clippyWrapper = document.createElement('div')
clippyWrapper.classList.add('clippy-wrapper')
const clippy = document.createElement('button')
clippy.classList.add('clippy')
const clippyIcon = document.createElement('i')
clippyIcon.classList.add('icon')
clippyWrapper.appendChild(clippy)
clippy.appendChild(clippyIcon)
// set clipboard with current code snippet element's content
clippy.addEventListener('click', function onClippyClick() {
if (copyElementContent(currentCodeSnippetElement)) {
setTempClippyIconFeedback(clippy, 'success')
} else {
setTempClippyIconFeedback(clippy, 'fail')
}
})
return clippyWrapper
}
const clippy = createClippy()
let currentCodeSnippetElement: Element
function attachCopySnippet() {
const readmeSelector = '.repository-content div#readme'
return $(readmeSelector, () => {
const readmeArticleSelector = '.repository-content div#readme article'
$(
readmeArticleSelector,
readmeElement =>
readmeElement.addEventListener('mouseover', e => {
// only move clippy when the mouse is over a new snippet (<pre>)
const target = e.target as Element
if (target.nodeName === 'PRE') {
| if (currentCodeSnippetElement !== target) {
currentCodeSnippetElement = target
/**
* <article>
* <pre></pre> <!-- case A -->
* <div class="highlight">
* <pre></pre> <!-- case B -->
* </div>
* </article>
*/
if (target.parentNode) target.parentNode.insertBefore(clippy, target)
}
}
| conditional_block |
|
DOMHelper.ts | E extends never
? O extends never
? (Element | null)
: ReturnType<O> | null
: O extends never
? (ReturnType<E> | null)
: ReturnType<O> | ReturnType<E> {
const element = document.querySelector(selector)
if (element) {
return existCallback ? existCallback(element as EE) : element
}
return otherwise ? otherwise() : null
}
function isInCodePage() {
const branchListSelector = '#branch-select-menu.branch-select-menu'
return Boolean($(branchListSelector))
}
function getBranches() {
const branchSelector = '.branch-select-menu .select-menu-list > div .select-menu-item-text'
const branchElements = Array.from(document.querySelectorAll(branchSelector))
return branchElements.map(element => element.innerHTML.trim())
}
function getCurrentBranch() {
const selectedBranchButtonSelector = '.repository-content .branch-select-menu summary'
const branchButtonElement: HTMLElement = $(selectedBranchButtonSelector)
if (branchButtonElement) {
const branchNameSpanElement = branchButtonElement.querySelector('span')
if (branchNameSpanElement) {
const partialBranchNameFromInnerText = branchNameSpanElement.innerText
if (!partialBranchNameFromInnerText.includes('…')) return partialBranchNameFromInnerText
}
const defaultTitle = 'Switch branches or tags'
const title = branchButtonElement.title.trim()
if (title !== defaultTitle && !title.includes(' ')) return title
}
const findFileButtonSelector =
'#js-repo-pjax-container .repository-content .file-navigation a[data-hotkey="t"]'
const urlFromFindFileButton: string | undefined = $(
findFileButtonSelector,
element => (element as HTMLAnchorElement).href,
)
if (urlFromFindFileButton) {
const commitPathRegex = /^(.*?)\/(.*?)\/find\/(.*?)$/
const result = urlFromFindFileButton.match(commitPathRegex)
if (result) {
const [_, userName, repoName, branchName] = result
if (!branchName.includes(' ')) return branchName
}
}
raiseError(new Error('cannot get current branch'))
}
/**
* add the logo element into DOM
*
*/
function insertLogoMountPoint() {
const logoSelector = '.gitako .gitako-logo'
return $(logoSelector) || createLogoMountPoint()
}
function createLogoMountPoint() {
const logoMountElement = document.createElement('div')
logoMountElement.setAttribute('class', 'gitako-logo-mount-point')
document.body.appendChild(logoMountElement)
return logoMountElement
}
/**
* content above the file navigation bar is the same for all pages of the repo;
* use this function to scroll down a bit to hide it
*/
function scrollToRepoContent() {
const repositoryContentSelector = '.repository-content'
// do NOT use behavior: smooth here as it will scroll horizontally
$(repositoryContentSelector, repositoryContentElement =>
repositoryContentElement.scrollIntoView(),
)
}
const pjax = new PJAX({
elements: '.pjax-link',
selectors: ['.repository-content', 'title'],
scrollTo: false,
analytics: false,
cacheBust: false,
forceCache: true, // TODO: merge namespace, add forceCache
})
function loadWithPJAX(URL: string) {
NProgress.start()
pjax.loadUrl(URL, { scrollTo: 0 })
}
/**
* there are a few types of pages on GitHub, mainly
* 1. raw text: code
* 2. rendered content: like Markdown
* 3. preview: like image
*/
const PAGE_TYPES = {
RAW_TEXT: 'raw_text',
RENDERED: 'rendered',
// PREVIEW: 'preview',
OTHERS: 'others',
}
/**
* this function tries to tell which type the current page is
*
* note: we do not determine the type from the file extension here
* because some files may use a wrong extension name
*
* TODO: distinguish type 'preview'
*/
function getCurrentPageType() {
const blobWrapperSelector = '.repository-content .file .blob-wrapper table'
const readmeSelector = '.repository-content .readme'
return (
$(blobWrapperSelector, () => PAGE_TYPES.RAW_TEXT) ||
$(readmeSelector, () => PAGE_TYPES.RENDERED) ||
PAGE_TYPES.OTHERS
)
}
export const REPO_TYPE_PRIVATE = 'private'
export const REPO_TYPE_PUBLIC = 'public'
function getRepoPageType() {
const headerSelector = `#js-repo-pjax-container .pagehead.repohead h1`
return $(headerSelector, header => {
const repoPageTypes = [REPO_TYPE_PRIVATE, REPO_TYPE_PUBLIC]
for (const repoPageType of repoPageTypes) {
if (header.classList.contains(repoPageType)) {
return repoPageType
}
}
raiseError(new Error('cannot get repo page type'))
})
}
/**
* add copy-file-content buttons to button groups;
* clicking these buttons copies the file content to the clipboard
*/
function attachCopyFileBtn() {
/**
* get the element containing the raw text content
*/
function getCodeElement() {
if (getCurrentPageType() === PAGE_TYPES.RAW_TEXT) {
const codeContentSelector = '.repository-content .file .data table'
const codeContentElement = $(codeContentSelector)
if (!codeContentElement) {
raiseError(new Error('cannot find code content element'))
}
return codeContentElement
}
}
/**
* change inner text of copy file button to give feedback
* @param {element} copyFileBtn
* @param {string} text
*/
function setTempCopyFileBtnText(copyFileBtn: HTMLButtonElement, text: string) {
| if (getCurrentPageType() === PAGE_TYPES.RAW_TEXT) {
const btnGroupSelector = [
// the button group next to navigation bar
'.repository-content .file-navigation.js-zeroclipboard-container .BtnGroup',
// the button group in file content header
'.repository-content .file .file-header .file-actions .BtnGroup',
].join(', ')
const btnGroups = document.querySelectorAll(btnGroupSelector)
btnGroups.forEach(btnGroup => {
const copyFileBtn = document.createElement('button')
copyFileBtn.classList.add('btn', 'btn-sm', 'BtnGroup-item', 'copy-file-btn')
copyFileBtn.innerText = 'Copy file'
copyFileBtn.addEventListener('click', () => {
const codeElement = getCodeElement()
if (codeElement) {
if (copyElementContent(codeElement)) {
setTempCopyFileBtnText(copyFileBtn, 'Success!')
} else {
setTempCopyFileBtnText(copyFileBtn, 'Copy failed!')
}
}
})
btnGroup.insertBefore(copyFileBtn, btnGroup.lastChild)
})
}
}
/**
* copy content of a DOM element to clipboard
* @param {element} element
* @returns {boolean} whether copy is successful
*/
function copyElementContent(element: Element) {
let selection = window.getSelection()
if (selection) selection.removeAllRanges()
const range = document.createRange()
range.selectNode(element)
selection = window.getSelection()
if (selection) selection.addRange(range)
const isCopySuccessful = document.execCommand('copy')
selection = window.getSelection()
if (selection) selection.removeAllRanges()
return isCopySuccessful
}
/**
* create a copy file content button `clippy`
* once the mouse enters a Markdown code snippet, move clippy into it;
* the user can copy the snippet's content by clicking it
*
* TODO: 'reactify' it
*/
function createClippy() {
function setTempClippyIconFeedback(clippy: Element, type: 'success' | 'fail') {
const tempIconClassName = type === 'success' ? 'success' : 'fail'
clippy.classList.add(tempIconClassName)
window.setTimeout(() => {
clippy.classList.remove(tempIconClassName)
}, 1000)
}
/**
* <div class="clippy-wrapper">
* <button class="clippy">
* <i class="octicon octicon-clippy" />
* </button>
* </div>
*/
const clippyWrapper = document.createElement('div')
clippyWrapper.classList.add('clippy-wrapper')
const clippy = document.createElement('button')
clippy.classList.add('clippy')
const clippyIcon = document.createElement('i')
clippyIcon.classList.add('icon')
clippyWrapper.appendChild(clippy)
clippy.appendChild(clippyIcon)
// set clipboard with current code snippet element's content
clippy.addEventListener('click', function onClippyClick() {
if (copyElementContent(currentCodeSnippetElement)) {
setTempClippyIconFeedback(clippy, 'success')
} else {
setTempClippyIconFeedback(clippy, 'fail')
}
})
return clippyWrapper
}
const clippy = createClippy()
let currentCodeSnippetElement: Element
function attachCopySnippet() {
const readmeSelector = '.repository-content div#readme'
return $(readmeSelector, () => {
const readmeArticleSelector = '.repository-content div#readme article'
$(
readmeArticleSelector,
readmeElement =>
readmeElement.addEventListener('mouseover', e => {
| copyFileBtn.innerText = text
window.setTimeout(() => (copyFileBtn.innerText = 'Copy file'), 1000)
}
| identifier_body |
DOMHelper.ts | SpanElement = branchButtonElement.querySelector('span')
if (branchNameSpanElement) {
const partialBranchNameFromInnerText = branchNameSpanElement.innerText
if (!partialBranchNameFromInnerText.includes('…')) return partialBranchNameFromInnerText
}
const defaultTitle = 'Switch branches or tags'
const title = branchButtonElement.title.trim()
if (title !== defaultTitle && !title.includes(' ')) return title
}
const findFileButtonSelector =
'#js-repo-pjax-container .repository-content .file-navigation a[data-hotkey="t"]'
const urlFromFindFileButton: string | undefined = $(
findFileButtonSelector,
element => (element as HTMLAnchorElement).href,
)
if (urlFromFindFileButton) {
const commitPathRegex = /^(.*?)\/(.*?)\/find\/(.*?)$/
const result = urlFromFindFileButton.match(commitPathRegex)
if (result) {
const [_, userName, repoName, branchName] = result
if (!branchName.includes(' ')) return branchName
}
}
raiseError(new Error('cannot get current branch'))
}
/**
* add the logo element into DOM
*
*/
function insertLogoMountPoint() {
const logoSelector = '.gitako .gitako-logo'
return $(logoSelector) || createLogoMountPoint()
}
function createLogoMountPoint() {
const logoMountElement = document.createElement('div')
logoMountElement.setAttribute('class', 'gitako-logo-mount-point')
document.body.appendChild(logoMountElement)
return logoMountElement
}
/**
* content above the file navigation bar is the same for all pages of the repo;
* use this function to scroll down a bit to hide it
*/
function scrollToRepoContent() {
const repositoryContentSelector = '.repository-content'
// do NOT use behavior: smooth here as it will scroll horizontally
$(repositoryContentSelector, repositoryContentElement =>
repositoryContentElement.scrollIntoView(),
)
}
const pjax = new PJAX({
elements: '.pjax-link',
selectors: ['.repository-content', 'title'],
scrollTo: false,
analytics: false,
cacheBust: false,
forceCache: true, // TODO: merge namespace, add forceCache
})
function loadWithPJAX(URL: string) {
NProgress.start()
pjax.loadUrl(URL, { scrollTo: 0 })
}
/**
* there are a few types of pages on GitHub, mainly
* 1. raw text: code
* 2. rendered content: like Markdown
* 3. preview: like image
*/
const PAGE_TYPES = {
RAW_TEXT: 'raw_text',
RENDERED: 'rendered',
// PREVIEW: 'preview',
OTHERS: 'others',
}
/**
* this function tries to tell which type the current page is
*
* note: we do not determine the type from the file extension here
* because some files may use a wrong extension name
*
* TODO: distinguish type 'preview'
*/
function getCurrentPageType() {
const blobWrapperSelector = '.repository-content .file .blob-wrapper table'
const readmeSelector = '.repository-content .readme'
return (
$(blobWrapperSelector, () => PAGE_TYPES.RAW_TEXT) ||
$(readmeSelector, () => PAGE_TYPES.RENDERED) ||
PAGE_TYPES.OTHERS
)
}
export const REPO_TYPE_PRIVATE = 'private'
export const REPO_TYPE_PUBLIC = 'public'
function getRepoPageType() {
const headerSelector = `#js-repo-pjax-container .pagehead.repohead h1`
return $(headerSelector, header => {
const repoPageTypes = [REPO_TYPE_PRIVATE, REPO_TYPE_PUBLIC]
for (const repoPageType of repoPageTypes) {
if (header.classList.contains(repoPageType)) {
return repoPageType
}
}
raiseError(new Error('cannot get repo page type'))
})
}
/**
* add copy-file-content buttons to button groups;
* clicking these buttons copies the file content to the clipboard
*/
function attachCopyFileBtn() {
/**
* get the element containing the raw text content
*/
function getCodeElement() {
if (getCurrentPageType() === PAGE_TYPES.RAW_TEXT) {
const codeContentSelector = '.repository-content .file .data table'
const codeContentElement = $(codeContentSelector)
if (!codeContentElement) {
raiseError(new Error('cannot find code content element'))
}
return codeContentElement
}
}
/**
* change inner text of copy file button to give feedback
* @param {element} copyFileBtn
* @param {string} text
*/
function setTempCopyFileBtnText(copyFileBtn: HTMLButtonElement, text: string) {
copyFileBtn.innerText = text
window.setTimeout(() => (copyFileBtn.innerText = 'Copy file'), 1000)
}
if (getCurrentPageType() === PAGE_TYPES.RAW_TEXT) {
const btnGroupSelector = [
// the button group next to navigation bar
'.repository-content .file-navigation.js-zeroclipboard-container .BtnGroup',
// the button group in file content header
'.repository-content .file .file-header .file-actions .BtnGroup',
].join(', ')
const btnGroups = document.querySelectorAll(btnGroupSelector)
btnGroups.forEach(btnGroup => {
const copyFileBtn = document.createElement('button')
copyFileBtn.classList.add('btn', 'btn-sm', 'BtnGroup-item', 'copy-file-btn')
copyFileBtn.innerText = 'Copy file'
copyFileBtn.addEventListener('click', () => {
const codeElement = getCodeElement()
if (codeElement) {
if (copyElementContent(codeElement)) {
setTempCopyFileBtnText(copyFileBtn, 'Success!')
} else {
setTempCopyFileBtnText(copyFileBtn, 'Copy failed!')
}
}
})
btnGroup.insertBefore(copyFileBtn, btnGroup.lastChild)
})
}
}
/**
* copy content of a DOM element to clipboard
* @param {element} element
* @returns {boolean} whether copy is successful
*/
function copyElementContent(element: Element) {
let selection = window.getSelection()
if (selection) selection.removeAllRanges()
const range = document.createRange()
range.selectNode(element)
selection = window.getSelection()
if (selection) selection.addRange(range)
const isCopySuccessful = document.execCommand('copy')
selection = window.getSelection()
if (selection) selection.removeAllRanges()
return isCopySuccessful
}
/**
* create a copy file content button `clippy`
* once the mouse enters a Markdown code snippet, move clippy into it;
* the user can copy the snippet's content by clicking it
*
* TODO: 'reactify' it
*/
function createClippy() {
function setTempClippyIconFeedback(clippy: Element, type: 'success' | 'fail') {
const tempIconClassName = type === 'success' ? 'success' : 'fail'
clippy.classList.add(tempIconClassName)
window.setTimeout(() => {
clippy.classList.remove(tempIconClassName)
}, 1000)
}
/**
* <div class="clippy-wrapper">
* <button class="clippy">
* <i class="octicon octicon-clippy" />
* </button>
* </div>
*/
const clippyWrapper = document.createElement('div')
clippyWrapper.classList.add('clippy-wrapper')
const clippy = document.createElement('button')
clippy.classList.add('clippy')
const clippyIcon = document.createElement('i')
clippyIcon.classList.add('icon')
clippyWrapper.appendChild(clippy)
clippy.appendChild(clippyIcon)
// set clipboard with current code snippet element's content
clippy.addEventListener('click', function onClippyClick() {
if (copyElementContent(currentCodeSnippetElement)) {
setTempClippyIconFeedback(clippy, 'success')
} else {
setTempClippyIconFeedback(clippy, 'fail')
}
})
return clippyWrapper
}
const clippy = createClippy()
let currentCodeSnippetElement: Element
function attachCopySnippet() {
const readmeSelector = '.repository-content div#readme'
return $(readmeSelector, () => {
const readmeArticleSelector = '.repository-content div#readme article'
$(
readmeArticleSelector,
readmeElement =>
readmeElement.addEventListener('mouseover', e => {
// only move clippy when the mouse is over a new snippet (<pre>)
const target = e.target as Element
if (target.nodeName === 'PRE') {
if (currentCodeSnippetElement !== target) {
currentCodeSnippetElement = target
/**
* <article>
* <pre></pre> <!-- case A -->
* <div class="highlight">
* <pre></pre> <!-- case B -->
* </div>
* </article>
*/
if (target.parentNode) target.parentNode.insertBefore(clippy, target)
}
}
}),
() => {
const plainReadmeSelector = '.repository-content div#readme .plain'
$(plainReadmeSelector, undefined, () =>
raiseError(
new Error('cannot find mount point for copy snippet button while readme exists'),
),
)
},
)
})
}
/**
* focus the side bar, so the user can manipulate it with the keyboard
*/
function fo | cusFileExplorer() | identifier_name |
|
libvirt_driver.py | # Libcloud v2.7.0
"""
Start a stopped node.
:param node: Node which should be used
:type node: :class:`Node`
:rtype: ``bool``
"""
return self.start_node(node=node)
def ex_shutdown_node(self, node):
# NOTE: This method is here for backward compatibility reasons after
# this method was promoted to be part of the standard compute API in
# Libcloud v2.7.0
"""
Shutdown a running node.
Note: Usually this will result in sending an ACPI event to the node.
:param node: Node which should be used
:type node: :class:`Node`
:rtype: ``bool``
"""
return self.stop_node(node=node)
def ex_suspend_node(self, node):
"""
Suspend a running node.
:param node: Node which should be used
:type node: :class:`Node`
:rtype: ``bool``
"""
domain = self._get_domain_for_node(node=node)
return domain.suspend() == 0
def ex_resume_node(self, node):
"""
Resume a suspended node.
:param node: Node which should be used
:type node: :class:`Node`
:rtype: ``bool``
"""
domain = self._get_domain_for_node(node=node)
return domain.resume() == 0
def ex_get_node_by_uuid(self, uuid):
"""
Retrieve Node object for a domain with a provided uuid.
:param uuid: Uuid of the domain.
:type uuid: ``str``
"""
domain = self._get_domain_for_uuid(uuid=uuid)
node = self._to_node(domain=domain)
return node
def ex_get_node_by_name(self, name):
"""
Retrieve Node object for a domain with a provided name.
:param name: Name of the domain.
:type name: ``str``
"""
domain = self._get_domain_for_name(name=name)
node = self._to_node(domain=domain)
return node
def ex_take_node_screenshot(self, node, directory, screen=0):
"""
Take a screenshot of a monitoring of a running instance.
:param node: Node to take the screenshot of.
:type node: :class:`libcloud.compute.base.Node`
:param directory: Path where the screenshot will be saved.
:type directory: ``str``
:param screen: ID of the monitor to take the screenshot of.
:type screen: ``int``
:return: Full path where the screenshot has been saved.
:rtype: ``str``
"""
if not os.path.exists(directory) or not os.path.isdir(directory):
raise ValueError("Invalid value for directory argument")
domain = self._get_domain_for_node(node=node)
stream = self.connection.newStream()
mime_type = domain.screenshot(stream=stream, screen=screen)
extensions = mimetypes.guess_all_extensions(type=mime_type)
if extensions:
extension = extensions[0]
else:
extension = ".png"
name = "screenshot-{}{}".format(int(time.time()), extension)
file_path = pjoin(directory, name)
with open(file_path, "wb") as fp:
def write(stream, buf, opaque):
fp.write(buf)
stream.recvAll(write, None)
try:
stream.finish()
except Exception:
# Finish is not supported by all backends
pass
return file_path
def ex_get_hypervisor_hostname(self):
"""
Return a system hostname on which the hypervisor is running.
"""
hostname = self.connection.getHostname()
return hostname
def ex_get_hypervisor_sysinfo(self):
"""
Retrieve hypervisor system information.
:rtype: ``dict``
"""
xml = self.connection.getSysinfo()
etree = ET.XML(xml)
attributes = ["bios", "system", "processor", "memory_device"]
sysinfo = {}
for attribute in attributes:
element = etree.find(attribute)
entries = self._get_entries(element=element)
sysinfo[attribute] = entries
return sysinfo
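# Added illustration (assumed shape; actual keys and values vary by host):
# {'bios': {'vendor': 'SeaBIOS', 'version': '1.15.0-1'},
#  'system': {'manufacturer': 'QEMU', 'product': 'Standard PC', ...},
#  'processor': {...}, 'memory_device': {...}}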
def _to_nodes(self, domains):
nodes = [self._to_node(domain=domain) for domain in domains]
return nodes
def _to_node(self, domain):
state, max_mem, memory, vcpu_count, used_cpu_time = domain.info()
state = self.NODE_STATE_MAP.get(state, NodeState.UNKNOWN)
public_ips, private_ips = [], []
ip_addresses = self._get_ip_addresses_for_domain(domain)
for ip_address in ip_addresses:
if is_public_subnet(ip_address):
public_ips.append(ip_address)
else:
private_ips.append(ip_address)
extra = {
"uuid": domain.UUIDString(),
"os_type": domain.OSType(),
"types": self.connection.getType(),
"used_memory": memory / 1024,
"vcpu_count": vcpu_count,
"used_cpu_time": used_cpu_time,
}
node = Node(
id=domain.ID(),
name=domain.name(),
state=state,
public_ips=public_ips,
private_ips=private_ips,
driver=self,
extra=extra,
)
node._uuid = domain.UUIDString() # we want to use a custom UUID
return node
def _get_ip_addresses_for_domain(self, domain):
"""
Retrieve IP addresses for the provided domain.
Note: This functionality is currently only supported on Linux and
only works if this code is run on the same machine as the VMs run
on.
:return: IP addresses for the provided domain.
:rtype: ``list``
"""
result = []
if platform.system() != "Linux":
# Only Linux is supported atm
return result
if "///" not in self._uri:
# Only local libvirtd is supported atm
return result
mac_addresses = self._get_mac_addresses_for_domain(domain=domain)
arp_table = {}
try:
cmd = ["arp", "-an"]
child = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, _ = child.communicate()
arp_table = self._parse_ip_table_arp(arp_output=stdout)
except OSError as e:
if e.errno == 2:
cmd = ["ip", "neigh"]
child = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, _ = child.communicate()
arp_table = self._parse_ip_table_neigh(ip_output=stdout)
for mac_address in mac_addresses:
if mac_address in arp_table:
ip_addresses = arp_table[mac_address]
result.extend(ip_addresses)
return result
def _get_mac_addresses_for_domain(self, domain):
"""
Parses network interface MAC addresses from the provided domain.
"""
xml = domain.XMLDesc()
etree = ET.XML(xml)
elems = etree.findall("devices/interface[@type='network']/mac")
result = []
for elem in elems:
mac_address = elem.get("address")
result.append(mac_address)
return result
def _get_domain_for_node(self, node):
"""
Return libvirt domain object for the provided node.
"""
domain = self.connection.lookupByUUIDString(node.uuid)
return domain
def _get_domain_for_uuid(self, uuid):
"""
Return libvirt domain object for the provided uuid.
"""
domain = self.connection.lookupByUUIDString(uuid)
return domain
def _get_domain_for_name(self, name):
"""
Return libvirt domain object for the provided name.
"""
domain = self.connection.lookupByName(name)
return domain
def _get_entries(self, element):
"""
Parse entries dictionary.
:rtype: ``dict``
"""
elements = element.findall("entry")
result = {}
for element in elements:
name = element.get("name")
value = element.text
result[name] = value
return result
def _parse_ip_table_arp(self, arp_output):
"""
Sets up the regexp for parsing out IP addresses from the 'arp -an'
command and passes it along to the parser function.
:return: Dictionary from the parsing function
:rtype: ``dict``
"""
arp_regex = re.compile(r".*?\((.*?)\) at (.*?)\s+")
return self._parse_mac_addr_table(arp_output, arp_regex)
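# Added illustration (hypothetical 'arp -an' line that arp_regex matches):
# "? (192.168.122.45) at 52:54:00:12:34:56 [ether] on virbr0"
# -> group(1) = '192.168.122.45' (IP), group(2) = '52:54:00:12:34:56' (MAC)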
def _parse_ip_table_neigh(self, ip_output):
"""
Sets up the regexp for parsing out IP addresses from the 'ip neighbor'
command and passes it along to the parser function.
:return: Dictionary from the parsing function
:rtype: ``dict``
"""
ip_regex = re.compile(r"(.*?)\s+.*lladdr\s+(.*?)\s+")
return self._parse_mac_addr_table(ip_output, ip_regex)
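# Added illustration (hypothetical 'ip neigh' line that ip_regex matches):
# "192.168.122.45 dev virbr0 lladdr 52:54:00:12:34:56 REACHABLE"
# -> group(1) = '192.168.122.45' (IP), group(2) = '52:54:00:12:34:56' (MAC)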
def _parse_mac_addr_table(self, cmd_output, mac_regex):
"""
Parse the command output and return a dictionary which maps a MAC address
to a list of IP addresses.
:return: Dictionary which maps a MAC address to a list of IP addresses.
:rtype: ``dict``
"""
lines = ensure_string(cmd_output).split("\n")
arp_table = defaultdict(list)
for line in lines:
match = mac_regex.match(line)
if not match:
| continue | conditional_block |
|
libvirt_driver.py | # no state
1: NodeState.RUNNING, # domain is running
2: NodeState.PENDING, # domain is blocked on resource
3: NodeState.TERMINATED, # domain is paused by user
4: NodeState.TERMINATED, # domain is being shut down
5: NodeState.TERMINATED, # domain is shut off
6: NodeState.UNKNOWN, # domain is crashed
7: NodeState.UNKNOWN, # domain is suspended by guest power management
}
def __init__(self, uri, key=None, secret=None):
"""
:param uri: Hypervisor URI (e.g. vbox:///session, qemu:///system,
etc.).
:type uri: ``str``
:param key: the username for a remote libvirtd server
:type key: ``str``
:param secret: the password for a remote libvirtd server
:type secret: ``str``
"""
if not have_libvirt:
raise RuntimeError("Libvirt driver requires 'libvirt' Python " + "package")
self._uri = uri
self._key = key
self._secret = secret
if uri is not None and "+tcp" in self._uri:
if key is None and secret is None:
raise RuntimeError(
"The remote Libvirt instance requires "
+ "authentication, please set 'key' and "
+ "'secret' parameters"
)
auth = [
[libvirt.VIR_CRED_AUTHNAME, libvirt.VIR_CRED_PASSPHRASE],
self._cred_callback,
None,
]
self.connection = libvirt.openAuth(uri, auth, 0)
else:
self.connection = libvirt.open(uri)
if uri is None:
self._uri = self.connection.getInfo()
def _cred_callback(self, cred, user_data):
"""
Callback for the authentication scheme, which will provide username
and password for the login. Reference: ( http://bit.ly/1U5yyQg )
:param cred: The credentials requested and the return
:type cred: ``list``
:param user_data: Custom data provided to the authentication routine
:type user_data: ``list``
:rtype: ``int``
"""
for credential in cred:
if credential[0] == libvirt.VIR_CRED_AUTHNAME:
credential[4] = self._key
elif credential[0] == libvirt.VIR_CRED_PASSPHRASE:
credential[4] = self._secret
return 0
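# Added note (assumed flow, per libvirt's openAuth contract): libvirt calls
# this once per requested credential from the `auth` list built in __init__;
# writing the reply into slot [4] supplies the value, and returning 0
# reports success back to libvirt.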
def list_nodes(self):
|
def reboot_node(self, node):
domain = self._get_domain_for_node(node=node)
return domain.reboot(flags=0) == 0
def destroy_node(self, node):
domain = self._get_domain_for_node(node=node)
return domain.destroy() == 0
def start_node(self, node):
domain = self._get_domain_for_node(node=node)
return domain.create() == 0
def stop_node(self, node):
domain = self._get_domain_for_node(node=node)
return domain.shutdown() == 0
def ex_start_node(self, node):
# NOTE: This method is here for backward compatibility reasons after
# this method was promoted to be part of the standard compute API in
# Libcloud v2.7.0
"""
Start a stopped node.
:param node: Node which should be used
:type node: :class:`Node`
:rtype: ``bool``
"""
return self.start_node(node=node)
def ex_shutdown_node(self, node):
# NOTE: This method is here for backward compatibility reasons after
# this method was promoted to be part of the standard compute API in
# Libcloud v2.7.0
"""
Shutdown a running node.
Note: Usually this will result in sending an ACPI event to the node.
:param node: Node which should be used
:type node: :class:`Node`
:rtype: ``bool``
"""
return self.stop_node(node=node)
def ex_suspend_node(self, node):
"""
Suspend a running node.
:param node: Node which should be used
:type node: :class:`Node`
:rtype: ``bool``
"""
domain = self._get_domain_for_node(node=node)
return domain.suspend() == 0
def ex_resume_node(self, node):
"""
Resume a suspended node.
:param node: Node which should be used
:type node: :class:`Node`
:rtype: ``bool``
"""
domain = self._get_domain_for_node(node=node)
return domain.resume() == 0
def ex_get_node_by_uuid(self, uuid):
"""
Retrieve Node object for a domain with a provided uuid.
:param uuid: Uuid of the domain.
:type uuid: ``str``
"""
domain = self._get_domain_for_uuid(uuid=uuid)
node = self._to_node(domain=domain)
return node
def ex_get_node_by_name(self, name):
"""
Retrieve Node object for a domain with a provided name.
:param name: Name of the domain.
:type name: ``str``
"""
domain = self._get_domain_for_name(name=name)
node = self._to_node(domain=domain)
return node
def ex_take_node_screenshot(self, node, directory, screen=0):
"""
Take a screenshot of a monitoring of a running instance.
:param node: Node to take the screenshot of.
:type node: :class:`libcloud.compute.base.Node`
:param directory: Path where the screenshot will be saved.
:type directory: ``str``
:param screen: ID of the monitor to take the screenshot of.
:type screen: ``int``
:return: Full path where the screenshot has been saved.
:rtype: ``str``
"""
if not os.path.exists(directory) or not os.path.isdir(directory):
raise ValueError("Invalid value for directory argument")
domain = self._get_domain_for_node(node=node)
stream = self.connection.newStream()
mime_type = domain.screenshot(stream=stream, screen=screen)
extensions = mimetypes.guess_all_extensions(type=mime_type)
if extensions:
extension = extensions[0]
else:
extension = ".png"
name = "screenshot-{}{}".format(int(time.time()), extension)
file_path = pjoin(directory, name)
with open(file_path, "wb") as fp:
def write(stream, buf, opaque):
fp.write(buf)
stream.recvAll(write, None)
try:
stream.finish()
except Exception:
# Finish is not supported by all backends
pass
return file_path
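# Added illustration (assumed driver class name and domain name):
#   driver = LibvirtNodeDriver(uri='qemu:///system')
#   node = driver.ex_get_node_by_name('my-vm')
#   path = driver.ex_take_node_screenshot(node, '/tmp')  # e.g. /tmp/screenshot-<ts>.png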
def ex_get_hypervisor_hostname(self):
"""
Return a system hostname on which the hypervisor is running.
"""
hostname = self.connection.getHostname()
return hostname
def ex_get_hypervisor_sysinfo(self):
"""
Retrieve hypervisor system information.
:rtype: ``dict``
"""
xml = self.connection.getSysinfo()
etree = ET.XML(xml)
attributes = ["bios", "system", "processor", "memory_device"]
sysinfo = {}
for attribute in attributes:
element = etree.find(attribute)
entries = self._get_entries(element=element)
sysinfo[attribute] = entries
return sysinfo
def _to_nodes(self, domains):
nodes = [self._to_node(domain=domain) for domain in domains]
return nodes
def _to_node(self, domain):
state, max_mem, memory, vcpu_count, used_cpu_time = domain.info()
state = self.NODE_STATE_MAP.get(state, NodeState.UNKNOWN)
public_ips, private_ips = [], []
ip_addresses = self._get_ip_addresses_for_domain(domain)
for ip_address in ip_addresses:
if is_public_subnet(ip_address):
public_ips.append(ip_address)
else:
private_ips.append(ip_address)
extra = {
"uuid": domain.UUIDString(),
"os_type": domain.OSType(),
"types": self.connection.getType(),
"used_memory": memory / 1024,
"vcpu_count": vcpu_count,
"used_cpu_time": used_cpu_time,
}
node = Node(
id=domain.ID(),
name=domain.name(),
state=state,
public_ips=public_ips,
private_ips=private_ips,
driver=self,
extra=extra,
)
node._uuid = domain.UUIDString() # we want to use a custom UUID
return node
def _get_ip_addresses_for_domain(self, domain):
"""
Retrieve IP addresses for the provided domain.
Note: This functionality is currently only supported on Linux and
only works if this code is run on the same machine as the VMs run
on.
:return: IP addresses for the provided domain.
:rtype: ``list``
"""
result = []
if platform.system() != "Linux":
# | domains = self.connection.listAllDomains()
nodes = self._to_nodes(domains=domains)
return nodes | identifier_body |
libvirt_driver.py | uri, key=None, secret=None):
"""
:param uri: Hypervisor URI (e.g. vbox:///session, qemu:///system,
etc.).
:type uri: ``str``
:param key: the username for a remote libvirtd server
:type key: ``str``
:param secret: the password for a remote libvirtd server
:type secret: ``str``
"""
if not have_libvirt:
raise RuntimeError("Libvirt driver requires 'libvirt' Python " + "package")
self._uri = uri
self._key = key
self._secret = secret
if uri is not None and "+tcp" in self._uri:
if key is None and secret is None:
raise RuntimeError(
"The remote Libvirt instance requires "
+ "authentication, please set 'key' and "
+ "'secret' parameters"
)
auth = [
[libvirt.VIR_CRED_AUTHNAME, libvirt.VIR_CRED_PASSPHRASE],
self._cred_callback,
None,
]
self.connection = libvirt.openAuth(uri, auth, 0)
else:
self.connection = libvirt.open(uri)
if uri is None:
self._uri = self.connection.getInfo()
def _cred_callback(self, cred, user_data):
"""
Callback for the authentication scheme, which will provide username
and password for the login. Reference: ( http://bit.ly/1U5yyQg )
:param cred: The credentials requested and the return
:type cred: ``list``
:param user_data: Custom data provided to the authentication routine
:type user_data: ``list``
:rtype: ``int``
"""
for credential in cred:
if credential[0] == libvirt.VIR_CRED_AUTHNAME:
credential[4] = self._key
elif credential[0] == libvirt.VIR_CRED_PASSPHRASE:
credential[4] = self._secret
return 0
def list_nodes(self):
domains = self.connection.listAllDomains()
nodes = self._to_nodes(domains=domains)
return nodes
def reboot_node(self, node):
domain = self._get_domain_for_node(node=node)
return domain.reboot(flags=0) == 0
def destroy_node(self, node):
domain = self._get_domain_for_node(node=node)
return domain.destroy() == 0
def start_node(self, node):
domain = self._get_domain_for_node(node=node)
return domain.create() == 0
def stop_node(self, node):
domain = self._get_domain_for_node(node=node)
return domain.shutdown() == 0
def ex_start_node(self, node):
# NOTE: This method is here for backward compatibility reasons after
# this method was promoted to be part of the standard compute API in
# Libcloud v2.7.0
"""
Start a stopped node.
:param node: Node which should be used
:type node: :class:`Node`
:rtype: ``bool``
"""
return self.start_node(node=node)
def ex_shutdown_node(self, node):
# NOTE: This method is here for backward compatibility reasons after
# this method was promoted to be part of the standard compute API in
# Libcloud v2.7.0
"""
Shutdown a running node.
Note: Usually this will result in sending an ACPI event to the node.
:param node: Node which should be used
:type node: :class:`Node`
:rtype: ``bool``
"""
return self.stop_node(node=node)
def ex_suspend_node(self, node):
"""
Suspend a running node.
:param node: Node which should be used
:type node: :class:`Node`
:rtype: ``bool``
"""
domain = self._get_domain_for_node(node=node)
return domain.suspend() == 0
def ex_resume_node(self, node):
"""
Resume a suspended node.
:param node: Node which should be used
:type node: :class:`Node`
:rtype: ``bool``
"""
domain = self._get_domain_for_node(node=node)
return domain.resume() == 0
def ex_get_node_by_uuid(self, uuid):
"""
Retrieve Node object for a domain with a provided uuid.
:param uuid: Uuid of the domain.
:type uuid: ``str``
"""
domain = self._get_domain_for_uuid(uuid=uuid)
node = self._to_node(domain=domain)
return node
def ex_get_node_by_name(self, name):
"""
Retrieve Node object for a domain with a provided name.
:param name: Name of the domain.
:type name: ``str``
"""
domain = self._get_domain_for_name(name=name)
node = self._to_node(domain=domain)
return node
def ex_take_node_screenshot(self, node, directory, screen=0):
"""
Take a screenshot of a monitoring of a running instance.
:param node: Node to take the screenshot of.
:type node: :class:`libcloud.compute.base.Node`
:param directory: Path where the screenshot will be saved.
:type directory: ``str``
:param screen: ID of the monitor to take the screenshot of.
:type screen: ``int``
:return: Full path where the screenshot has been saved.
:rtype: ``str``
"""
if not os.path.exists(directory) or not os.path.isdir(directory):
raise ValueError("Invalid value for directory argument")
domain = self._get_domain_for_node(node=node)
stream = self.connection.newStream()
mime_type = domain.screenshot(stream=stream, screen=screen)
extensions = mimetypes.guess_all_extensions(type=mime_type)
if extensions:
extension = extensions[0]
else:
extension = ".png"
name = "screenshot-{}{}".format(int(time.time()), extension)
file_path = pjoin(directory, name)
with open(file_path, "wb") as fp:
def write(stream, buf, opaque):
fp.write(buf)
stream.recvAll(write, None)
try:
stream.finish()
except Exception:
# Finish is not supported by all backends
pass
return file_path
def ex_get_hypervisor_hostname(self):
"""
Return a system hostname on which the hypervisor is running.
"""
hostname = self.connection.getHostname()
return hostname
def ex_get_hypervisor_sysinfo(self):
"""
Retrieve hypervisor system information.
:rtype: ``dict``
"""
xml = self.connection.getSysinfo()
etree = ET.XML(xml)
attributes = ["bios", "system", "processor", "memory_device"]
sysinfo = {}
for attribute in attributes:
element = etree.find(attribute)
entries = self._get_entries(element=element)
sysinfo[attribute] = entries
return sysinfo
def _to_nodes(self, domains):
nodes = [self._to_node(domain=domain) for domain in domains]
return nodes
def _to_node(self, domain):
state, max_mem, memory, vcpu_count, used_cpu_time = domain.info()
state = self.NODE_STATE_MAP.get(state, NodeState.UNKNOWN)
public_ips, private_ips = [], []
ip_addresses = self._get_ip_addresses_for_domain(domain)
for ip_address in ip_addresses:
if is_public_subnet(ip_address):
public_ips.append(ip_address)
else:
private_ips.append(ip_address)
extra = {
"uuid": domain.UUIDString(),
"os_type": domain.OSType(),
"types": self.connection.getType(),
"used_memory": memory / 1024,
"vcpu_count": vcpu_count,
"used_cpu_time": used_cpu_time,
}
node = Node(
id=domain.ID(),
name=domain.name(),
state=state,
public_ips=public_ips,
private_ips=private_ips,
driver=self,
extra=extra,
)
node._uuid = domain.UUIDString() # we want to use a custom UUID
return node
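# Added illustration (assumed values) of the resulting node.extra dict:
# {'uuid': '6695eedb-...', 'os_type': 'hvm', 'types': 'QEMU',
#  'used_memory': 2048.0, 'vcpu_count': 2, 'used_cpu_time': 123456789}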
def _get_ip_addresses_for_domain(self, domain):
"""
Retrieve IP addresses for the provided domain.
Note: This functionality is currently only supported on Linux and
only works if this code is run on the same machine as the VMs run
on.
:return: IP addresses for the provided domain.
:rtype: ``list``
"""
result = []
if platform.system() != "Linux":
# Only Linux is supported atm
return result
if "///" not in self._uri:
# Only local libvirtd is supported atm
return result
mac_addresses = self._get_mac_addresses_for_domain(domain=domain)
arp_table = {}
try:
cmd = ["arp", "-an"]
child = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, _ = child.communicate()
arp_table = self._parse_ip_table_arp(arp_output=stdout)
except OSError as e:
if e.errno == 2:
cmd = ["ip", "neigh"] | random_line_split |
||
libvirt_driver.py | # no state
1: NodeState.RUNNING, # domain is running
2: NodeState.PENDING, # domain is blocked on resource
3: NodeState.TERMINATED, # domain is paused by user
4: NodeState.TERMINATED, # domain is being shut down
5: NodeState.TERMINATED, # domain is shut off
6: NodeState.UNKNOWN, # domain is crashed
7: NodeState.UNKNOWN, # domain is suspended by guest power management
}
def __init__(self, uri, key=None, secret=None):
"""
:param uri: Hypervisor URI (e.g. vbox:///session, qemu:///system,
etc.).
:type uri: ``str``
:param key: the username for a remote libvirtd server
:type key: ``str``
:param secret: the password for a remote libvirtd server
:type key: ``str``
"""
if not have_libvirt:
raise RuntimeError("Libvirt driver requires 'libvirt' Python " + "package")
self._uri = uri
self._key = key
self._secret = secret
if uri is not None and "+tcp" in self._uri:
if key is None and secret is None:
raise RuntimeError(
"The remote Libvirt instance requires "
+ "authentication, please set 'key' and "
+ "'secret' parameters"
)
auth = [
[libvirt.VIR_CRED_AUTHNAME, libvirt.VIR_CRED_PASSPHRASE],
self._cred_callback,
None,
]
self.connection = libvirt.openAuth(uri, auth, 0)
else:
self.connection = libvirt.open(uri)
if uri is None:
self._uri = self.connection.getInfo()
def _cred_callback(self, cred, user_data):
"""
Callback for the authentication scheme, which will provide username
and password for the login. Reference: ( http://bit.ly/1U5yyQg )
:param cred: The credentials requested and the return
:type cred: ``list``
:param user_data: Custom data provided to the authentication routine
:type user_data: ``list``
:rtype: ``int``
"""
for credential in cred:
if credential[0] == libvirt.VIR_CRED_AUTHNAME:
credential[4] = self._key
elif credential[0] == libvirt.VIR_CRED_PASSPHRASE:
credential[4] = self._secret
return 0
def list_nodes(self):
domains = self.connection.listAllDomains()
nodes = self._to_nodes(domains=domains)
return nodes
def reboot_node(self, node):
domain = self._get_domain_for_node(node=node)
return domain.reboot(flags=0) == 0
def destroy_node(self, node):
domain = self._get_domain_for_node(node=node)
return domain.destroy() == 0
def start_node(self, node):
domain = self._get_domain_for_node(node=node)
return domain.create() == 0
def stop_node(self, node):
domain = self._get_domain_for_node(node=node)
return domain.shutdown() == 0
def ex_start_node(self, node):
# NOTE: This method is here for backward compatibility reasons after
# this method was promoted to be part of the standard compute API in
# Libcloud v2.7.0
"""
Start a stopped node.
:param node: Node which should be used
:type node: :class:`Node`
:rtype: ``bool``
"""
return self.start_node(node=node)
def ex_shutdown_node(self, node):
# NOTE: This method is here for backward compatibility reasons after
# this method was promoted to be part of the standard compute API in
# Libcloud v2.7.0
"""
Shutdown a running node.
Note: Usually this will result in sending an ACPI event to the node.
:param node: Node which should be used
:type node: :class:`Node`
:rtype: ``bool``
"""
return self.stop_node(node=node)
def ex_suspend_node(self, node):
"""
Suspend a running node.
:param node: Node which should be used
:type node: :class:`Node`
:rtype: ``bool``
"""
domain = self._get_domain_for_node(node=node)
return domain.suspend() == 0
def ex_resume_node(self, node):
"""
Resume a suspended node.
:param node: Node which should be used
:type node: :class:`Node`
:rtype: ``bool``
"""
domain = self._get_domain_for_node(node=node)
return domain.resume() == 0
def ex_get_node_by_uuid(self, uuid):
"""
Retrieve Node object for a domain with a provided uuid.
:param uuid: Uuid of the domain.
:type uuid: ``str``
"""
domain = self._get_domain_for_uuid(uuid=uuid)
node = self._to_node(domain=domain)
return node
def | (self, name):
"""
Retrieve Node object for a domain with a provided name.
:param name: Name of the domain.
:type name: ``str``
"""
domain = self._get_domain_for_name(name=name)
node = self._to_node(domain=domain)
return node
def ex_take_node_screenshot(self, node, directory, screen=0):
"""
Take a screenshot of a monitoring of a running instance.
:param node: Node to take the screenshot of.
:type node: :class:`libcloud.compute.base.Node`
:param directory: Path where the screenshot will be saved.
:type directory: ``str``
:param screen: ID of the monitor to take the screenshot of.
:type screen: ``int``
:return: Full path where the screenshot has been saved.
:rtype: ``str``
"""
if not os.path.exists(directory) or not os.path.isdir(directory):
raise ValueError("Invalid value for directory argument")
domain = self._get_domain_for_node(node=node)
stream = self.connection.newStream()
mime_type = domain.screenshot(stream=stream, screen=0)
extensions = mimetypes.guess_all_extensions(type=mime_type)
if extensions:
extension = extensions[0]
else:
extension = ".png"
name = "screenshot-{}{}".format(int(time.time()), extension)
file_path = pjoin(directory, name)
with open(file_path, "wb") as fp:
def write(stream, buf, opaque):
fp.write(buf)
stream.recvAll(write, None)
try:
stream.finish()
except Exception:
# Finish is not supported by all backends
pass
return file_path
def ex_get_hypervisor_hostname(self):
"""
Return a system hostname on which the hypervisor is running.
"""
hostname = self.connection.getHostname()
return hostname
def ex_get_hypervisor_sysinfo(self):
"""
Retrieve hypervisor system information.
:rtype: ``dict``
"""
xml = self.connection.getSysinfo()
etree = ET.XML(xml)
attributes = ["bios", "system", "processor", "memory_device"]
sysinfo = {}
for attribute in attributes:
element = etree.find(attribute)
entries = self._get_entries(element=element)
sysinfo[attribute] = entries
return sysinfo
def _to_nodes(self, domains):
nodes = [self._to_node(domain=domain) for domain in domains]
return nodes
def _to_node(self, domain):
state, max_mem, memory, vcpu_count, used_cpu_time = domain.info()
state = self.NODE_STATE_MAP.get(state, NodeState.UNKNOWN)
public_ips, private_ips = [], []
ip_addresses = self._get_ip_addresses_for_domain(domain)
for ip_address in ip_addresses:
if is_public_subnet(ip_address):
public_ips.append(ip_address)
else:
private_ips.append(ip_address)
extra = {
"uuid": domain.UUIDString(),
"os_type": domain.OSType(),
"types": self.connection.getType(),
"used_memory": memory / 1024,
"vcpu_count": vcpu_count,
"used_cpu_time": used_cpu_time,
}
node = Node(
id=domain.ID(),
name=domain.name(),
state=state,
public_ips=public_ips,
private_ips=private_ips,
driver=self,
extra=extra,
)
node._uuid = domain.UUIDString() # we want to use a custom UUID
return node
def _get_ip_addresses_for_domain(self, domain):
"""
Retrieve IP addresses for the provided domain.
Note: This functionality is currently only supported on Linux and
only works if this code is run on the same machine as the VMs run
on.
:return: IP addresses for the provided domain.
:rtype: ``list``
"""
result = []
if platform.system() != "Linux":
| ex_get_node_by_name | identifier_name |
cache.rs | {
use directories::ProjectDirs;
use log::{debug, warn};
use spin::Once;
use std::fs;
use std::path::{Path, PathBuf};
use std::sync::atomic::{AtomicBool, Ordering};
struct Config {
pub cache_enabled: bool,
pub cache_dir: PathBuf,
pub compression_level: i32,
}
// Private static, so only internal function can access it.
static CONFIG: Once<Config> = Once::new();
static INIT_CALLED: AtomicBool = AtomicBool::new(false);
static DEFAULT_COMPRESSION_LEVEL: i32 = 0; // 0 for zstd means "use default level"
/// Returns true if and only if the cache is enabled.
pub fn cache_enabled() -> bool {
// Not everyone knows about the cache system, i.e. the tests,
// so the default is cache disabled.
CONFIG
.call_once(|| Config::new_cache_disabled())
.cache_enabled
}
/// Returns path to the cache directory.
///
/// Panics if the cache is disabled.
pub fn cache_directory() -> &'static PathBuf {
&CONFIG
.r#try()
.expect("Cache system must be initialized")
.cache_dir
}
/// Returns cache compression level.
///
/// Panics if the cache is disabled.
pub fn compression_level() -> i32 {
CONFIG
.r#try()
.expect("Cache system must be initialized")
.compression_level
}
/// Initializes the cache system. Should be called exactly once,
/// and before using the cache system. Otherwise it can panic.
pub fn init<P: AsRef<Path>>(enabled: bool, dir: Option<P>, compression_level: Option<i32>) {
INIT_CALLED
.compare_exchange(false, true, Ordering::SeqCst, Ordering::SeqCst)
.expect("Cache system init must be called at most once");
assert!(
CONFIG.r#try().is_none(),
"Cache system init must be called before using the system."
);
let conf = CONFIG.call_once(|| {
Config::new(
enabled,
dir,
compression_level.unwrap_or(DEFAULT_COMPRESSION_LEVEL),
)
});
debug!(
"Cache init(): enabled={}, cache-dir={:?}, compression-level={}",
conf.cache_enabled, conf.cache_dir, conf.compression_level,
);
}
impl Config {
pub fn new_cache_disabled() -> Self {
Self {
cache_enabled: false,
cache_dir: PathBuf::new(),
compression_level: DEFAULT_COMPRESSION_LEVEL,
}
}
pub fn new<P: AsRef<Path>>(enabled: bool, dir: Option<P>, compression_level: i32) -> Self {
if enabled {
match dir {
Some(dir) => Self::new_step2(dir.as_ref(), compression_level),
None => match ProjectDirs::from("", "CraneStation", "wasmtime") {
Some(proj_dirs) => {
Self::new_step2(proj_dirs.cache_dir(), compression_level)
}
None => {
warn!("Cache directory not specified and failed to find the default. Disabling cache.");
Self::new_cache_disabled()
}
},
}
} else {
Self::new_cache_disabled()
}
}
fn | (dir: &Path, compression_level: i32) -> Self {
// On Windows, if we want long paths, we need '\\?\' prefix, but it doesn't work
// with relative paths. One way to get absolute path (the only one?) is to use
// fs::canonicalize, but it requires that given path exists. The extra advantage
// of this method is fact that the method prepends '\\?\' on Windows.
match fs::create_dir_all(dir) {
Ok(()) => match fs::canonicalize(dir) {
Ok(p) => Self {
cache_enabled: true,
cache_dir: p,
compression_level,
},
Err(err) => {
warn!(
"Failed to canonicalize the cache directory. Disabling cache. \
Message: {}",
err
);
Self::new_cache_disabled()
}
},
Err(err) => {
warn!(
"Failed to create the cache directory. Disabling cache. Message: {}",
err
);
Self::new_cache_disabled()
}
}
}
}
}
lazy_static! {
static ref SELF_MTIME: String = {
std::env::current_exe()
.map_err(|_| warn!("Failed to get path of current executable"))
.ok()
.and_then(|path| {
fs::metadata(&path)
.map_err(|_| warn!("Failed to get metadata of current executable"))
.ok()
})
.and_then(|metadata| {
metadata
.modified()
.map_err(|_| warn!("Failed to get metadata of current executable"))
.ok()
})
.and_then(|mtime| {
Some(match mtime.duration_since(std::time::UNIX_EPOCH) {
Ok(duration) => format!("{}", duration.as_millis()),
Err(err) => format!("m{}", err.duration().as_millis()),
})
})
.unwrap_or("no-mtime".to_string())
};
}
pub struct ModuleCacheEntry {
mod_cache_path: Option<PathBuf>,
}
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]
pub struct ModuleCacheData {
compilation: Compilation,
relocations: Relocations,
address_transforms: ModuleAddressMap,
value_ranges: ValueLabelsRanges,
stack_slots: PrimaryMap<DefinedFuncIndex, ir::StackSlots>,
}
type ModuleCacheDataTupleType = (
Compilation,
Relocations,
ModuleAddressMap,
ValueLabelsRanges,
PrimaryMap<DefinedFuncIndex, ir::StackSlots>,
);
struct Sha256Hasher(Sha256);
impl ModuleCacheEntry {
pub fn new<'data>(
module: &Module,
function_body_inputs: &PrimaryMap<DefinedFuncIndex, FunctionBodyData<'data>>,
isa: &dyn isa::TargetIsa,
compiler_name: &str,
generate_debug_info: bool,
) -> Self {
let mod_cache_path = if conf::cache_enabled() {
let hash = Sha256Hasher::digest(module, function_body_inputs);
let compiler_dir = if cfg!(debug_assertions) {
format!(
"{comp_name}-{comp_ver}-{comp_mtime}",
comp_name = compiler_name,
comp_ver = env!("GIT_REV"),
comp_mtime = *SELF_MTIME,
)
} else {
format!(
"{comp_name}-{comp_ver}",
comp_name = compiler_name,
comp_ver = env!("GIT_REV"),
)
};
let mod_filename = format!(
"mod-{mod_hash}{mod_dbg}",
mod_hash = base64::encode_config(&hash, base64::URL_SAFE_NO_PAD), // standard encoding uses '/' which can't be used for filename
mod_dbg = if generate_debug_info { ".d" } else { "" },
);
Some(
conf::cache_directory()
.join(isa.triple().to_string())
.join(compiler_dir)
.join(mod_filename),
)
} else {
None
};
Self { mod_cache_path }
}
pub fn get_data(&self) -> Option<ModuleCacheData> {
let path = self.mod_cache_path.as_ref()?;
trace!("get_data() for path: {}", path.display());
let compressed_cache_bytes = fs::read(path).ok()?;
let cache_bytes = zstd::decode_all(&compressed_cache_bytes[..])
.map_err(|err| warn!("Failed to decompress cached code: {}", err))
.ok()?;
bincode::deserialize(&cache_bytes[..])
.map_err(|err| warn!("Failed to deserialize cached code: {}", err))
.ok()
}
pub fn update_data(&self, data: &ModuleCacheData) {
let _ = self.update_data_impl(data);
}
fn update_data_impl(&self, data: &ModuleCacheData) -> Option<()> {
let path = self.mod_cache_path.as_ref()?;
trace!("update_data() for path: {}", path.display());
let serialized_data = bincode::serialize(&data)
.map_err(|err| warn!("Failed to serialize cached code: {}", err))
.ok()?;
let compressed_data = zstd::encode_all(&serialized_data[..], conf::compression_level())
.map_err(|err| warn!("Failed to compress cached code: {}", err))
.ok()?;
// Optimize syscalls: first, try writing to disk. It should succeed in most cases.
// Otherwise, try creating the cache directory and retry writing to the file.
let err = fs::write(path, &compressed_data).err()?; // return on success
debug!(
"Attempting to create the cache directory, because \
failed to write cached code to disk, path: {}, message: {}",
path.display(),
err,
);
let cache_dir = path.parent().unwrap();
fs::create_dir_all(cache_dir)
.map_err(|err| {
warn!(
"Failed to create cache directory, path: {}, message: {}",
cache_dir.display(),
err
)
})
.ok()?;
let err = fs::write(path, &compressed_data).err()?;
warn!(
"Failed to write cached | new_step2 | identifier_name |
cache.rs | {
use directories::ProjectDirs;
use log::{debug, warn};
use spin::Once;
use std::fs;
use std::path::{Path, PathBuf};
use std::sync::atomic::{AtomicBool, Ordering};
struct Config {
pub cache_enabled: bool,
pub cache_dir: PathBuf,
pub compression_level: i32,
}
// Private static, so only internal function can access it.
static CONFIG: Once<Config> = Once::new();
static INIT_CALLED: AtomicBool = AtomicBool::new(false);
static DEFAULT_COMPRESSION_LEVEL: i32 = 0; // 0 for zstd means "use default level"
/// Returns true if and only if the cache is enabled.
pub fn cache_enabled() -> bool {
// Not everyone knows about the cache system, i.e. the tests,
// so the default is cache disabled.
CONFIG
.call_once(|| Config::new_cache_disabled())
.cache_enabled
}
/// Returns path to the cache directory.
///
/// Panics if the cache is disabled.
pub fn cache_directory() -> &'static PathBuf {
&CONFIG
.r#try()
.expect("Cache system must be initialized")
.cache_dir
}
/// Returns cache compression level.
///
/// Panics if the cache is disabled.
pub fn compression_level() -> i32 {
CONFIG
.r#try()
.expect("Cache system must be initialized")
.compression_level
}
/// Initializes the cache system. Should be called exactly once,
/// and before using the cache system. Otherwise it can panic.
pub fn init<P: AsRef<Path>>(enabled: bool, dir: Option<P>, compression_level: Option<i32>) {
INIT_CALLED
.compare_exchange(false, true, Ordering::SeqCst, Ordering::SeqCst)
.expect("Cache system init must be called at most once");
assert!(
CONFIG.r#try().is_none(),
"Cache system init must be called before using the system."
);
let conf = CONFIG.call_once(|| {
Config::new(
enabled,
dir,
compression_level.unwrap_or(DEFAULT_COMPRESSION_LEVEL),
)
});
debug!(
"Cache init(): enabled={}, cache-dir={:?}, compression-level={}",
conf.cache_enabled, conf.cache_dir, conf.compression_level,
);
}
impl Config {
pub fn new_cache_disabled() -> Self {
Self {
cache_enabled: false,
cache_dir: PathBuf::new(),
compression_level: DEFAULT_COMPRESSION_LEVEL,
}
}
pub fn new<P: AsRef<Path>>(enabled: bool, dir: Option<P>, compression_level: i32) -> Self {
if enabled {
match dir {
Some(dir) => Self::new_step2(dir.as_ref(), compression_level),
None => match ProjectDirs::from("", "CraneStation", "wasmtime") {
Some(proj_dirs) => {
Self::new_step2(proj_dirs.cache_dir(), compression_level)
}
None => {
warn!("Cache directory not specified and failed to find the default. Disabling cache.");
Self::new_cache_disabled()
}
},
}
} else {
Self::new_cache_disabled()
}
}
fn new_step2(dir: &Path, compression_level: i32) -> Self {
// On Windows, if we want long paths, we need '\\?\' prefix, but it doesn't work
// with relative paths. One way to get absolute path (the only one?) is to use
// fs::canonicalize, but it requires that given path exists. The extra advantage
// of this method is fact that the method prepends '\\?\' on Windows.
match fs::create_dir_all(dir) {
Ok(()) => match fs::canonicalize(dir) {
Ok(p) => Self {
cache_enabled: true,
cache_dir: p,
compression_level,
},
Err(err) => {
warn!(
"Failed to canonicalize the cache directory. Disabling cache. \
Message: {}",
err
);
Self::new_cache_disabled()
}
},
Err(err) => {
warn!(
"Failed to create the cache directory. Disabling cache. Message: {}",
err
);
Self::new_cache_disabled()
}
}
}
}
}
lazy_static! {
static ref SELF_MTIME: String = {
std::env::current_exe()
.map_err(|_| warn!("Failed to get path of current executable"))
.ok()
.and_then(|path| {
fs::metadata(&path)
.map_err(|_| warn!("Failed to get metadata of current executable"))
.ok()
})
.and_then(|metadata| {
metadata
.modified()
.map_err(|_| warn!("Failed to get metadata of current executable"))
.ok()
})
.and_then(|mtime| {
Some(match mtime.duration_since(std::time::UNIX_EPOCH) {
Ok(duration) => format!("{}", duration.as_millis()),
Err(err) => format!("m{}", err.duration().as_millis()),
})
})
.unwrap_or("no-mtime".to_string())
};
}
pub struct ModuleCacheEntry {
mod_cache_path: Option<PathBuf>,
}
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]
pub struct ModuleCacheData {
compilation: Compilation,
relocations: Relocations,
address_transforms: ModuleAddressMap,
value_ranges: ValueLabelsRanges,
stack_slots: PrimaryMap<DefinedFuncIndex, ir::StackSlots>,
}
type ModuleCacheDataTupleType = (
Compilation,
Relocations,
ModuleAddressMap,
ValueLabelsRanges,
PrimaryMap<DefinedFuncIndex, ir::StackSlots>,
);
struct Sha256Hasher(Sha256);
impl ModuleCacheEntry {
pub fn new<'data>(
module: &Module,
function_body_inputs: &PrimaryMap<DefinedFuncIndex, FunctionBodyData<'data>>,
isa: &dyn isa::TargetIsa,
compiler_name: &str,
generate_debug_info: bool,
) -> Self | mod_dbg = if generate_debug_info { ".d" } else { "" },
);
Some(
conf::cache_directory()
.join(isa.triple().to_string())
.join(compiler_dir)
.join(mod_filename),
)
} else {
None
};
Self { mod_cache_path }
}
pub fn get_data(&self) -> Option<ModuleCacheData> {
let path = self.mod_cache_path.as_ref()?;
trace!("get_data() for path: {}", path.display());
let compressed_cache_bytes = fs::read(path).ok()?;
let cache_bytes = zstd::decode_all(&compressed_cache_bytes[..])
.map_err(|err| warn!("Failed to decompress cached code: {}", err))
.ok()?;
bincode::deserialize(&cache_bytes[..])
.map_err(|err| warn!("Failed to deserialize cached code: {}", err))
.ok()
}
pub fn update_data(&self, data: &ModuleCacheData) {
let _ = self.update_data_impl(data);
}
fn update_data_impl(&self, data: &ModuleCacheData) -> Option<()> {
let path = self.mod_cache_path.as_ref()?;
trace!("update_data() for path: {}", path.display());
let serialized_data = bincode::serialize(&data)
.map_err(|err| warn!("Failed to serialize cached code: {}", err))
.ok()?;
let compressed_data = zstd::encode_all(&serialized_data[..], conf::compression_level())
.map_err(|err| warn!("Failed to compress cached code: {}", err))
.ok()?;
// Optimize syscalls: first, try writing to disk. It should succeed in most cases.
// Otherwise, try creating the cache directory and retry writing to the file.
let err = fs::write(path, &compressed_data).err()?; // return on success
debug!(
"Attempting to create the cache directory, because \
failed to write cached code to disk, path: {}, message: {}",
path.display(),
err,
);
let cache_dir = path.parent().unwrap();
fs::create_dir_all(cache_dir)
.map_err(|err| {
warn!(
"Failed to create cache directory, path: {}, message: {}",
cache_dir.display(),
err
)
})
.ok()?;
let err = fs::write(path, &compressed_data).err()?;
warn!(
"Failed to write cached code | {
let mod_cache_path = if conf::cache_enabled() {
let hash = Sha256Hasher::digest(module, function_body_inputs);
let compiler_dir = if cfg!(debug_assertions) {
format!(
"{comp_name}-{comp_ver}-{comp_mtime}",
comp_name = compiler_name,
comp_ver = env!("GIT_REV"),
comp_mtime = *SELF_MTIME,
)
} else {
format!(
"{comp_name}-{comp_ver}",
comp_name = compiler_name,
comp_ver = env!("GIT_REV"),
)
};
let mod_filename = format!(
"mod-{mod_hash}{mod_dbg}",
mod_hash = base64::encode_config(&hash, base64::URL_SAFE_NO_PAD), // standard encoding uses '/' which can't be used for filename | identifier_body |
cache.rs | {
use directories::ProjectDirs;
use log::{debug, warn};
use spin::Once;
use std::fs;
use std::path::{Path, PathBuf};
use std::sync::atomic::{AtomicBool, Ordering}; | }
// Private static, so only internal function can access it.
static CONFIG: Once<Config> = Once::new();
static INIT_CALLED: AtomicBool = AtomicBool::new(false);
static DEFAULT_COMPRESSION_LEVEL: i32 = 0; // 0 for zstd means "use default level"
/// Returns true if and only if the cache is enabled.
pub fn cache_enabled() -> bool {
// Not everyone knows about the cache system, i.e. the tests,
// so the default is cache disabled.
CONFIG
.call_once(|| Config::new_cache_disabled())
.cache_enabled
}
/// Returns path to the cache directory.
///
/// Panics if the cache is disabled.
pub fn cache_directory() -> &'static PathBuf {
&CONFIG
.r#try()
.expect("Cache system must be initialized")
.cache_dir
}
/// Returns cache compression level.
///
/// Panics if the cache is disabled.
pub fn compression_level() -> i32 {
CONFIG
.r#try()
.expect("Cache system must be initialized")
.compression_level
}
/// Initializes the cache system. Should be called exactly once,
/// and before using the cache system. Otherwise it can panic.
pub fn init<P: AsRef<Path>>(enabled: bool, dir: Option<P>, compression_level: Option<i32>) {
INIT_CALLED
.compare_exchange(false, true, Ordering::SeqCst, Ordering::SeqCst)
.expect("Cache system init must be called at most once");
assert!(
CONFIG.r#try().is_none(),
"Cache system init must be called before using the system."
);
let conf = CONFIG.call_once(|| {
Config::new(
enabled,
dir,
compression_level.unwrap_or(DEFAULT_COMPRESSION_LEVEL),
)
});
debug!(
"Cache init(): enabled={}, cache-dir={:?}, compression-level={}",
conf.cache_enabled, conf.cache_dir, conf.compression_level,
);
}
impl Config {
pub fn new_cache_disabled() -> Self {
Self {
cache_enabled: false,
cache_dir: PathBuf::new(),
compression_level: DEFAULT_COMPRESSION_LEVEL,
}
}
pub fn new<P: AsRef<Path>>(enabled: bool, dir: Option<P>, compression_level: i32) -> Self {
if enabled {
match dir {
Some(dir) => Self::new_step2(dir.as_ref(), compression_level),
None => match ProjectDirs::from("", "CraneStation", "wasmtime") {
Some(proj_dirs) => {
Self::new_step2(proj_dirs.cache_dir(), compression_level)
}
None => {
warn!("Cache directory not specified and failed to find the default. Disabling cache.");
Self::new_cache_disabled()
}
},
}
} else {
Self::new_cache_disabled()
}
}
fn new_step2(dir: &Path, compression_level: i32) -> Self {
// On Windows, if we want long paths, we need '\\?\' prefix, but it doesn't work
// with relative paths. One way to get absolute path (the only one?) is to use
// fs::canonicalize, but it requires that given path exists. The extra advantage
// of this method is fact that the method prepends '\\?\' on Windows.
match fs::create_dir_all(dir) {
Ok(()) => match fs::canonicalize(dir) {
Ok(p) => Self {
cache_enabled: true,
cache_dir: p,
compression_level,
},
Err(err) => {
warn!(
"Failed to canonicalize the cache directory. Disabling cache. \
Message: {}",
err
);
Self::new_cache_disabled()
}
},
Err(err) => {
warn!(
"Failed to create the cache directory. Disabling cache. Message: {}",
err
);
Self::new_cache_disabled()
}
}
}
}
}
lazy_static! {
static ref SELF_MTIME: String = {
std::env::current_exe()
.map_err(|_| warn!("Failed to get path of current executable"))
.ok()
.and_then(|path| {
fs::metadata(&path)
.map_err(|_| warn!("Failed to get metadata of current executable"))
.ok()
})
.and_then(|metadata| {
metadata
.modified()
.map_err(|_| warn!("Failed to get metadata of current executable"))
.ok()
})
.and_then(|mtime| {
Some(match mtime.duration_since(std::time::UNIX_EPOCH) {
Ok(duration) => format!("{}", duration.as_millis()),
Err(err) => format!("m{}", err.duration().as_millis()),
})
})
.unwrap_or("no-mtime".to_string())
};
}
pub struct ModuleCacheEntry {
mod_cache_path: Option<PathBuf>,
}
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]
pub struct ModuleCacheData {
compilation: Compilation,
relocations: Relocations,
address_transforms: ModuleAddressMap,
value_ranges: ValueLabelsRanges,
stack_slots: PrimaryMap<DefinedFuncIndex, ir::StackSlots>,
}
type ModuleCacheDataTupleType = (
Compilation,
Relocations,
ModuleAddressMap,
ValueLabelsRanges,
PrimaryMap<DefinedFuncIndex, ir::StackSlots>,
);
struct Sha256Hasher(Sha256);
impl ModuleCacheEntry {
pub fn new<'data>(
module: &Module,
function_body_inputs: &PrimaryMap<DefinedFuncIndex, FunctionBodyData<'data>>,
isa: &dyn isa::TargetIsa,
compiler_name: &str,
generate_debug_info: bool,
) -> Self {
let mod_cache_path = if conf::cache_enabled() {
let hash = Sha256Hasher::digest(module, function_body_inputs);
let compiler_dir = if cfg!(debug_assertions) {
format!(
"{comp_name}-{comp_ver}-{comp_mtime}",
comp_name = compiler_name,
comp_ver = env!("GIT_REV"),
comp_mtime = *SELF_MTIME,
)
} else {
format!(
"{comp_name}-{comp_ver}",
comp_name = compiler_name,
comp_ver = env!("GIT_REV"),
)
};
let mod_filename = format!(
"mod-{mod_hash}{mod_dbg}",
mod_hash = base64::encode_config(&hash, base64::URL_SAFE_NO_PAD), // standard encoding uses '/' which can't be used for filename
mod_dbg = if generate_debug_info { ".d" } else { "" },
);
Some(
conf::cache_directory()
.join(isa.triple().to_string())
.join(compiler_dir)
.join(mod_filename),
)
} else {
None
};
Self { mod_cache_path }
}
pub fn get_data(&self) -> Option<ModuleCacheData> {
let path = self.mod_cache_path.as_ref()?;
trace!("get_data() for path: {}", path.display());
let compressed_cache_bytes = fs::read(path).ok()?;
let cache_bytes = zstd::decode_all(&compressed_cache_bytes[..])
.map_err(|err| warn!("Failed to decompress cached code: {}", err))
.ok()?;
bincode::deserialize(&cache_bytes[..])
.map_err(|err| warn!("Failed to deserialize cached code: {}", err))
.ok()
}
pub fn update_data(&self, data: &ModuleCacheData) {
let _ = self.update_data_impl(data);
}
fn update_data_impl(&self, data: &ModuleCacheData) -> Option<()> {
let path = self.mod_cache_path.as_ref()?;
trace!("update_data() for path: {}", path.display());
let serialized_data = bincode::serialize(&data)
.map_err(|err| warn!("Failed to serialize cached code: {}", err))
.ok()?;
let compressed_data = zstd::encode_all(&serialized_data[..], conf::compression_level())
.map_err(|err| warn!("Failed to compress cached code: {}", err))
.ok()?;
// Optimize syscalls: first, try writing to disk. It should succeed in most cases.
// Otherwise, try creating the cache directory and retry writing to the file.
let err = fs::write(path, &compressed_data).err()?; // return on success
debug!(
"Attempting to create the cache directory, because \
failed to write cached code to disk, path: {}, message: {}",
path.display(),
err,
);
let cache_dir = path.parent().unwrap();
fs::create_dir_all(cache_dir)
.map_err(|err| {
warn!(
"Failed to create cache directory, path: {}, message: {}",
cache_dir.display(),
err
)
})
.ok()?;
let err = fs::write(path, &compressed_data).err()?;
warn!(
"Failed to write cached |
struct Config {
pub cache_enabled: bool,
pub cache_dir: PathBuf,
pub compression_level: i32, | random_line_split |
analyze_sample_captures.py | baseband-sig-canvas-')],
[sg.Text("Extracted Frames: ")],
[sg.Multiline(size=(140, 10), key='--CONSOLE_TEXT--', autoscroll=True)]
]
data = None
layout = [
[sg.Text("Capture File: "), sg.Input(key='_CAPTURE_FILE_'), sg.FilesBrowse(), sg.Button("Load", key='_CAPTURE_FILE_LOAD_')],
[sg.Multiline(size=(140, 7), key='--SETTINGS_TEXT--', autoscroll=True)],
[sg.Text('Capture Number:'), sg.Combo(list(range(0, 3000)), key='--FRAME_NUM_VAL--'), sg.Button('Analyze', key='--FRAME_NUM--'), sg.Text('Valid capture indexs:'), sg.Combo(list(range(0, 3000)), key='--Valid-capture-indexs--' ) ],
[sg.TabGroup(
[
[sg.Tab('Raw Samples', raw_samples_layout)],
[sg.Tab('Low Pass filtered samples', filtered_samples_layout)],
[sg.Tab('Time Sync Signal', time_sync_layoyt)],
[sg.Tab('Time Sync Signal - Constellation', time_sync_const)],
[sg.Tab('Base Band Signal', baseband_sig_layout)]
]
)],
]
def plot_samples(frame_index):
# Raw Captures
raw_samples_fig = plt.figure(figsize=(10, 5), dpi=100)
raw_axis_time = raw_samples_fig.add_subplot(2, 1, 1)
raw_axis_freq = raw_samples_fig.add_subplot(2, 1, 2)
raw_samples_fig.subplots_adjust(left=0.1, right=0.98, top=0.99, bottom=0.1)
'''
raw Time signal
'''
raw_samples = np.array(data['raw'][frame_index]).flatten()
total_capture_time = raw_samples.shape[0]/data['sdr_settings']['sample_rate']
time = np.linspace(0, total_capture_time, num=raw_samples.shape[0])
raw_axis_time.plot(time, np.abs(raw_samples))
raw_axis_time.set_xlabel("Time (seconds)")
raw_axis_time.set_ylabel("Amplitude (V)")
raw_axis_time.grid()
'''
raw PSD signal
'''
raw_axis_freq.clear()
raw_axis_freq.psd(raw_samples, NFFT=raw_samples.shape[0], Fs=data['sdr_settings']['sample_rate']/1e6, Fc=data['sdr_settings']['center_freq']/1e6)
raw_axis_freq.set_xlabel("Freq (MHz)")
raw_axis_freq.set_ylabel("PSD (dB)")
# Time Sync
time_sync_fig = plt.figure(figsize=(10, 5), dpi=100)
time_sync_axis = time_sync_fig.add_subplot(2, 1, 1)
time_sync_raw_axis = time_sync_fig.add_subplot(2, 1, 2)
time_sync_fig.subplots_adjust(left=0.1, right=0.98, top=0.99, bottom=0.1)
time_sync_samples = np.array(data['time_sync'][frame_index]).flatten()
time = np.linspace(0, total_capture_time, num=time_sync_samples.shape[0])
time_sync_axis.plot(time, np.abs(time_sync_samples), color='orange', label="time sync sig")
time_sync_axis.plot(time, np.abs(time_sync_samples), 'x', color='blue',label="time sync sig (sampled)")
time_sync_axis.set_xlabel("Time (seconds)")
time_sync_axis.set_ylabel("Amplitude (V)")
time_sync_axis.grid()
time_sync_axis.legend()
# Raw signal
time = np.linspace(0, total_capture_time, num=raw_samples.shape[0])
time_sync_raw_axis.plot(time, np.abs(raw_samples))
#time_sync_raw_axis.plot(time, np.abs(time_sync_samples), 'x', color='red',)
time_sync_raw_axis.set_xlabel("Time (seconds)")
time_sync_raw_axis.set_ylabel("Amplitude (v)")
time_sync_raw_axis.grid()
# Time Sync Constellation
time_sync_const_fig = plt.figure(figsize=(10, 5), dpi=100)
time_sync_const_axis = time_sync_const_fig.add_subplot(1, 1, 1)
time_sync_const_fig.subplots_adjust(left=0.1, right=0.98, top=0.99, bottom=0.1)
y = np.zeros(time_sync_samples.shape[0])
time_sync_const_axis.scatter(np.abs(time_sync_samples), y,)
time_sync_const_axis.grid()
time_sync_const_axis.set_xlim([-2.5, 2.5])
time_sync_const_axis.set_ylim([-2.5, 2.5])
time_sync_const_axis.set_xlabel("In-Phase")
time_sync_const_axis.set_ylabel("Quadrature")
# Base Band Signal
baseband_sig_fig = plt.figure(figsize=(10, 3), dpi=100)
demod_signal_axis = baseband_sig_fig.add_subplot(1, 1, 1)
baseband_sig_fig.subplots_adjust(left=0.1, right=0.98, top=0.99, bottom=0.1)
baseband_sig_samples = np.array(data['ask_demod'][frame_index]).flatten()
bar1 = demod_signal_axis.bar(np.arange(0, baseband_sig_samples.shape[0]), baseband_sig_samples, color='white', edgecolor='black', label='BaseBand Signal (Bits)')
demod_signal_axis.set_ylim([0, 1.7])
#demod_signal_axis.get_xaxis().set_visible(False)
for rect in bar1:
height = rect.get_height()
demod_signal_axis.text(rect.get_x() + rect.get_width()/2.0, height, '%d' % int(height), ha='center', va='bottom')
nrz_signal = np.array(data['ask_demod'][frame_index]).flatten()
sig_match_indexs = search_sequence_cv2(nrz_signal, np.array([1, 1, 1, 0, 1]))
corr_sig = np.zeros(nrz_signal.shape)
for index in sig_match_indexs:
corr_sig[index] = 1
demod_signal_axis.plot(corr_sig, label="Signal correlation with preamble")
demod_signal_axis.legend()
demod_signal_axis.grid()
return (raw_samples_fig, time_sync_fig, time_sync_const_fig, baseband_sig_fig)
def delete_fig_agg(fig_agg):
fig_agg.get_tk_widget().forget()
plt.close('all')
def draw_figure(canvas, figure):
figure_canvas_agg = FigureCanvasTkAgg(figure, canvas)
figure_canvas_agg.draw()
figure_canvas_agg.get_tk_widget().pack(side='top', fill='both', expand=1)
return figure_canvas_agg
window = sg.Window('Sample Captures Analyzer', layout, default_element_size=(40,40), finalize=True)
frames_detected_capture_indxs = []
if __name__ == '__main__':
|
window['--Valid-capture-indexs--'].update(values=frames_detected_capture_indxs)
device_info_string = "Capture time stamp: %s.\n"%(data['cature_time_stamp'])
device_info_string += "Downlink performance => FER: %.3f, Frames detected: %d, Failed Frames: %d. \n"%(data['rx_performance_metrics']['fer'],
data['rx_performance_metrics']['frames_detected'],
data['rx_performance_metrics']['failed_frames'])
device_info_string += 'SDR Settings => Sample rate: %d, Center freq :%d, Gain: %s, Freq Corr: %d, Capture Len: %d.'%(data['sdr_settings']['sample_rate'],
data['sdr_settings']['center_freq'],
data['sdr_settings']['gain'],
data['sdr_settings']['freq_corr'],
len(data['raw'][0]))
device_info_string += '\nBaseband Singal Info => Symbol duration %.6f seconds.'%(data['sdr_settings']['symbol_dur'])
device_info_string += '\nTime Sync Block Info => Alpha: %.2f.'%(data['sdr_settings']['alpha'])
device_info_string += '\nASK Demod Block Info => Thershold: %.2f.'%(data['sdr_settings']['decision_thershold'])
device_info_string += '\nFrame Detector Block Info => Barker seq: %d, CRC Polynomial: %d.'%(data['sdr_settings']['barker_seq'],
data['sdr_settings']['crc_polynomial'])
window['--SETTINGS_TEXT--'].update(device_info_string)
window['--FRAME_NUM_VAL--'].update(values=list(range(0 | raw_samples_fig = None
time_sync_fig = None
time_sync_const_fig = None
baseband_sig_fig = None
while True:
event, values = window.read()
if event == '--FRAME_NUM--' or event == '_CAPTURE_FILE_LOAD_':
if event == '_CAPTURE_FILE_LOAD_':
fileHandler = open(values['_CAPTURE_FILE_'], "rb")
data = pickle.load(fileHandler)
fileHandler.close()
# Find capture indexs where the frames were detected.
for capture_idx, frames in enumerate(data['frame_detection']):
if len(frames) > 0:
frames_detected_capture_indxs.append(capture_idx) | conditional_block |
analyze_sample_captures.py | baseband-sig-canvas-')],
[sg.Text("Extracted Frames: ")],
[sg.Multiline(size=(140, 10), key='--CONSOLE_TEXT--', autoscroll=True)]
]
data = None
layout = [
[sg.Text("Capture File: "), sg.Input(key='_CAPTURE_FILE_'), sg.FilesBrowse(), sg.Button("Load", key='_CAPTURE_FILE_LOAD_')],
[sg.Multiline(size=(140, 7), key='--SETTINGS_TEXT--', autoscroll=True)],
[sg.Text('Capture Number:'), sg.Combo(list(range(0, 3000)), key='--FRAME_NUM_VAL--'), sg.Button('Analyze', key='--FRAME_NUM--'), sg.Text('Valid capture indexs:'), sg.Combo(list(range(0, 3000)), key='--Valid-capture-indexs--' ) ],
[sg.TabGroup(
[
[sg.Tab('Raw Samples', raw_samples_layout)],
[sg.Tab('Low Pass filtered samples', filtered_samples_layout)],
[sg.Tab('Time Sync Signal', time_sync_layoyt)],
[sg.Tab('Time Sync Signal - Constellation', time_sync_const)],
[sg.Tab('Base Band Signal', baseband_sig_layout)]
]
)],
]
def plot_samples(frame_index):
# Raw Captures
raw_samples_fig = plt.figure(figsize=(10, 5), dpi=100)
raw_axis_time = raw_samples_fig.add_subplot(2, 1, 1)
raw_axis_freq = raw_samples_fig.add_subplot(2, 1, 2)
raw_samples_fig.subplots_adjust(left=0.1, right=0.98, top=0.99, bottom=0.1)
'''
raw Time signal
'''
raw_samples = np.array(data['raw'][frame_index]).flatten()
total_capture_time = raw_samples.shape[0]/data['sdr_settings']['sample_rate']
time = np.linspace(0, total_capture_time, num=raw_samples.shape[0])
raw_axis_time.plot(time, np.abs(raw_samples))
raw_axis_time.set_xlabel("Time (seconds)")
raw_axis_time.set_ylabel("Amplitude (V)")
raw_axis_time.grid()
'''
raw PSD signal
'''
raw_axis_freq.clear()
raw_axis_freq.psd(raw_samples, NFFT=raw_samples.shape[0], Fs=data['sdr_settings']['sample_rate']/1e6, Fc=data['sdr_settings']['center_freq']/1e6)
raw_axis_freq.set_xlabel("Freq (MHz)")
raw_axis_freq.set_ylabel("PSD (dB)")
# Time Sync
time_sync_fig = plt.figure(figsize=(10, 5), dpi=100)
time_sync_axis = time_sync_fig.add_subplot(2, 1, 1)
time_sync_raw_axis = time_sync_fig.add_subplot(2, 1, 2)
time_sync_fig.subplots_adjust(left=0.1, right=0.98, top=0.99, bottom=0.1)
time_sync_samples = np.array(data['time_sync'][frame_index]).flatten()
time = np.linspace(0, total_capture_time, num=time_sync_samples.shape[0])
time_sync_axis.plot(time, np.abs(time_sync_samples), color='orange', label="time sync sig")
time_sync_axis.plot(time, np.abs(time_sync_samples), 'x', color='blue',label="time sync sig (sampled)")
time_sync_axis.set_xlabel("Time (seconds)")
time_sync_axis.set_ylabel("Amplitude (V)")
time_sync_axis.grid()
time_sync_axis.legend()
# Raw signal
time = np.linspace(0, total_capture_time, num=raw_samples.shape[0])
time_sync_raw_axis.plot(time, np.abs(raw_samples))
#time_sync_raw_axis.plot(time, np.abs(time_sync_samples), 'x', color='red',)
time_sync_raw_axis.set_xlabel("Time (seconds)")
time_sync_raw_axis.set_ylabel("Amplitude (v)")
time_sync_raw_axis.grid()
# Time Sync Constellation
time_sync_const_fig = plt.figure(figsize=(10, 5), dpi=100)
time_sync_const_axis = time_sync_const_fig.add_subplot(1, 1, 1)
time_sync_const_fig.subplots_adjust(left=0.1, right=0.98, top=0.99, bottom=0.1)
y = np.zeros(time_sync_samples.shape[0])
time_sync_const_axis.scatter(np.abs(time_sync_samples), y,)
time_sync_const_axis.grid()
time_sync_const_axis.set_xlim([-2.5, 2.5])
time_sync_const_axis.set_ylim([-2.5, 2.5])
time_sync_const_axis.set_xlabel("In-Phase")
time_sync_const_axis.set_ylabel("Quadrature")
# Base Band Signal
baseband_sig_fig = plt.figure(figsize=(10, 3), dpi=100)
demod_signal_axis = baseband_sig_fig.add_subplot(1, 1, 1)
baseband_sig_fig.subplots_adjust(left=0.1, right=0.98, top=0.99, bottom=0.1)
baseband_sig_samples = np.array(data['ask_demod'][frame_index]).flatten()
bar1 = demod_signal_axis.bar(np.arange(0, baseband_sig_samples.shape[0]), baseband_sig_samples, color='white', edgecolor='black', label='BaseBand Signal (Bits)')
demod_signal_axis.set_ylim([0, 1.7])
#demod_signal_axis.get_xaxis().set_visible(False)
for rect in bar1:
height = rect.get_height()
demod_signal_axis.text(rect.get_x() + rect.get_width()/2.0, height, '%d' % int(height), ha='center', va='bottom')
nrz_signal = np.array(data['ask_demod'][frame_index]).flatten()
sig_match_indexs = search_sequence_cv2(nrz_signal, np.array([1, 1, 1, 0, 1]))
corr_sig = np.zeros(nrz_signal.shape)
for index in sig_match_indexs:
corr_sig[index] = 1
demod_signal_axis.plot(corr_sig, label="Signal correlation with preamble")
demod_signal_axis.legend()
demod_signal_axis.grid()
return (raw_samples_fig, time_sync_fig, time_sync_const_fig, baseband_sig_fig)
def delete_fig_agg(fig_agg):
fig_agg.get_tk_widget().forget()
plt.close('all')
def draw_figure(canvas, figure):
|
window = sg.Window('Sample Captures Analyzer', layout, default_element_size=(40,40), finalize=True)
frames_detected_capture_indxs = []
if __name__ == '__main__':
raw_samples_fig = None
time_sync_fig = None
time_sync_const_fig = None
baseband_sig_fig = None
while True:
event, values = window.read()
if event == '--FRAME_NUM--' or event == '_CAPTURE_FILE_LOAD_':
if event == '_CAPTURE_FILE_LOAD_':
fileHandler = open(values['_CAPTURE_FILE_'], "rb")
data = pickle.load(fileHandler)
fileHandler.close()
# Find capture indexs where the frames were detected.
for capture_idx, frames in enumerate(data['frame_detection']):
if len(frames) > 0:
frames_detected_capture_indxs.append(capture_idx)
window['--Valid-capture-indexs--'].update(values=frames_detected_capture_indxs)
device_info_string = "Capture time stamp: %s.\n"%(data['cature_time_stamp'])
device_info_string += "Downlink performance => FER: %.3f, Frames detected: %d, Failed Frames: %d. \n"%(data['rx_performance_metrics']['fer'],
data['rx_performance_metrics']['frames_detected'],
data['rx_performance_metrics']['failed_frames'])
device_info_string += 'SDR Settings => Sample rate: %d, Center freq :%d, Gain: %s, Freq Corr: %d, Capture Len: %d.'%(data['sdr_settings']['sample_rate'],
data['sdr_settings']['center_freq'],
data['sdr_settings']['gain'],
data['sdr_settings']['freq_corr'],
len(data['raw'][0]))
device_info_string += '\nBaseband Singal Info => Symbol duration %.6f seconds.'%(data['sdr_settings']['symbol_dur'])
device_info_string += '\nTime Sync Block Info => Alpha: %.2f.'%(data['sdr_settings']['alpha'])
device_info_string += '\nASK Demod Block Info => Thershold: %.2f.'%(data['sdr_settings']['decision_thershold'])
device_info_string += '\nFrame Detector Block Info => Barker seq: %d, CRC Polynomial: %d.'%(data['sdr_settings']['barker_seq'],
data['sdr_settings']['crc_polynomial'])
window['--SETTINGS_TEXT--'].update(device_info_string)
window['--FRAME_NUM_VAL--'].update(values=list(range(0 | figure_canvas_agg = FigureCanvasTkAgg(figure, canvas)
figure_canvas_agg.draw()
figure_canvas_agg.get_tk_widget().pack(side='top', fill='both', expand=1)
return figure_canvas_agg | identifier_body |
analyze_sample_captures.py | baseband-sig-canvas-')],
[sg.Text("Extracted Frames: ")],
[sg.Multiline(size=(140, 10), key='--CONSOLE_TEXT--', autoscroll=True)]
]
data = None
layout = [
[sg.Text("Capture File: "), sg.Input(key='_CAPTURE_FILE_'), sg.FilesBrowse(), sg.Button("Load", key='_CAPTURE_FILE_LOAD_')],
[sg.Multiline(size=(140, 7), key='--SETTINGS_TEXT--', autoscroll=True)],
[sg.Text('Capture Number:'), sg.Combo(list(range(0, 3000)), key='--FRAME_NUM_VAL--'), sg.Button('Analyze', key='--FRAME_NUM--'), sg.Text('Valid capture indexs:'), sg.Combo(list(range(0, 3000)), key='--Valid-capture-indexs--' ) ],
[sg.TabGroup(
[
[sg.Tab('Raw Samples', raw_samples_layout)],
[sg.Tab('Low Pass filtered samples', filtered_samples_layout)],
[sg.Tab('Time Sync Signal', time_sync_layoyt)],
[sg.Tab('Time Sync Signal - Constellation', time_sync_const)],
[sg.Tab('Base Band Signal', baseband_sig_layout)]
]
)],
]
def plot_samples(frame_index):
# Raw Captures
raw_samples_fig = plt.figure(figsize=(10, 5), dpi=100)
raw_axis_time = raw_samples_fig.add_subplot(2, 1, 1)
raw_axis_freq = raw_samples_fig.add_subplot(2, 1, 2)
raw_samples_fig.subplots_adjust(left=0.1, right=0.98, top=0.99, bottom=0.1)
'''
raw Time signal
'''
raw_samples = np.array(data['raw'][frame_index]).flatten()
total_capture_time = raw_samples.shape[0]/data['sdr_settings']['sample_rate']
time = np.linspace(0, total_capture_time, num=raw_samples.shape[0])
raw_axis_time.plot(time, np.abs(raw_samples))
raw_axis_time.set_xlabel("Time (seconds)")
raw_axis_time.set_ylabel("Amplitude (V)")
raw_axis_time.grid()
'''
raw PSD signal
'''
raw_axis_freq.clear()
raw_axis_freq.psd(raw_samples, NFFT=raw_samples.shape[0], Fs=data['sdr_settings']['sample_rate']/1e6, Fc=data['sdr_settings']['center_freq']/1e6)
raw_axis_freq.set_xlabel("Freq (MHz)")
raw_axis_freq.set_ylabel("PSD (dB)")
# Time Sync
time_sync_fig = plt.figure(figsize=(10, 5), dpi=100)
time_sync_axis = time_sync_fig.add_subplot(2, 1, 1)
time_sync_raw_axis = time_sync_fig.add_subplot(2, 1, 2)
time_sync_fig.subplots_adjust(left=0.1, right=0.98, top=0.99, bottom=0.1)
time_sync_samples = np.array(data['time_sync'][frame_index]).flatten()
time = np.linspace(0, total_capture_time, num=time_sync_samples.shape[0])
time_sync_axis.plot(time, np.abs(time_sync_samples), color='orange', label="time sync sig")
time_sync_axis.plot(time, np.abs(time_sync_samples), 'x', color='blue',label="time sync sig (sampled)")
time_sync_axis.set_xlabel("Time (seconds)")
time_sync_axis.set_ylabel("Amplitude (V)")
time_sync_axis.grid()
time_sync_axis.legend()
# Raw signal
time = np.linspace(0, total_capture_time, num=raw_samples.shape[0])
time_sync_raw_axis.plot(time, np.abs(raw_samples))
#time_sync_raw_axis.plot(time, np.abs(time_sync_samples), 'x', color='red',)
time_sync_raw_axis.set_xlabel("Time (seconds)")
time_sync_raw_axis.set_ylabel("Amplitude (v)")
time_sync_raw_axis.grid()
# Time Sync Constellation
time_sync_const_fig = plt.figure(figsize=(10, 5), dpi=100)
time_sync_const_axis = time_sync_const_fig.add_subplot(1, 1, 1)
time_sync_const_fig.subplots_adjust(left=0.1, right=0.98, top=0.99, bottom=0.1)
y = np.zeros(time_sync_samples.shape[0])
time_sync_const_axis.scatter(np.abs(time_sync_samples), y,)
time_sync_const_axis.grid()
time_sync_const_axis.set_xlim([-2.5, 2.5])
time_sync_const_axis.set_ylim([-2.5, 2.5])
time_sync_const_axis.set_xlabel("In-Phase")
time_sync_const_axis.set_ylabel("Quadrature")
# Base Band Signal
baseband_sig_fig = plt.figure(figsize=(10, 3), dpi=100)
demod_signal_axis = baseband_sig_fig.add_subplot(1, 1, 1)
baseband_sig_fig.subplots_adjust(left=0.1, right=0.98, top=0.99, bottom=0.1)
baseband_sig_samples = np.array(data['ask_demod'][frame_index]).flatten()
bar1 = demod_signal_axis.bar(np.arange(0, baseband_sig_samples.shape[0]), baseband_sig_samples, color='white', edgecolor='black', label='BaseBand Signal (Bits)')
demod_signal_axis.set_ylim([0, 1.7])
#demod_signal_axis.get_xaxis().set_visible(False)
for rect in bar1:
height = rect.get_height()
demod_signal_axis.text(rect.get_x() + rect.get_width()/2.0, height, '%d' % int(height), ha='center', va='bottom')
nrz_signal = np.array(data['ask_demod'][frame_index]).flatten()
sig_match_indexs = search_sequence_cv2(nrz_signal, np.array([1, 1, 1, 0, 1]))
corr_sig = np.zeros(nrz_signal.shape)
for index in sig_match_indexs:
corr_sig[index] = 1
demod_signal_axis.plot(corr_sig, label="Signal correlation with preamble")
demod_signal_axis.legend()
demod_signal_axis.grid()
return (raw_samples_fig, time_sync_fig, time_sync_const_fig, baseband_sig_fig)
def delete_fig_agg(fig_agg):
fig_agg.get_tk_widget().forget()
plt.close('all')
def draw_figure(canvas, figure):
figure_canvas_agg = FigureCanvasTkAgg(figure, canvas)
figure_canvas_agg.draw()
figure_canvas_agg.get_tk_widget().pack(side='top', fill='both', expand=1)
return figure_canvas_agg
window = sg.Window('Sample Captures Analyzer', layout, default_element_size=(40,40), finalize=True)
frames_detected_capture_indxs = []
if __name__ == '__main__':
raw_samples_fig = None
time_sync_fig = None
time_sync_const_fig = None
baseband_sig_fig = None
while True:
event, values = window.read()
| data = pickle.load(fileHandler)
fileHandler.close()
# Find capture indexs where the frames were detected.
for capture_idx, frames in enumerate(data['frame_detection']):
if len(frames) > 0:
frames_detected_capture_indxs.append(capture_idx)
window['--Valid-capture-indexs--'].update(values=frames_detected_capture_indxs)
device_info_string = "Capture time stamp: %s.\n"%(data['cature_time_stamp'])
device_info_string += "Downlink performance => FER: %.3f, Frames detected: %d, Failed Frames: %d. \n"%(data['rx_performance_metrics']['fer'],
data['rx_performance_metrics']['frames_detected'],
data['rx_performance_metrics']['failed_frames'])
device_info_string += 'SDR Settings => Sample rate: %d, Center freq :%d, Gain: %s, Freq Corr: %d, Capture Len: %d.'%(data['sdr_settings']['sample_rate'],
data['sdr_settings']['center_freq'],
data['sdr_settings']['gain'],
data['sdr_settings']['freq_corr'],
len(data['raw'][0]))
device_info_string += '\nBaseband Singal Info => Symbol duration %.6f seconds.'%(data['sdr_settings']['symbol_dur'])
device_info_string += '\nTime Sync Block Info => Alpha: %.2f.'%(data['sdr_settings']['alpha'])
device_info_string += '\nASK Demod Block Info => Thershold: %.2f.'%(data['sdr_settings']['decision_thershold'])
device_info_string += '\nFrame Detector Block Info => Barker seq: %d, CRC Polynomial: %d.'%(data['sdr_settings']['barker_seq'],
data['sdr_settings']['crc_polynomial'])
window['--SETTINGS_TEXT--'].update(device_info_string)
window['--FRAME_NUM_VAL--'].update(values=list(range(0 | if event == '--FRAME_NUM--' or event == '_CAPTURE_FILE_LOAD_':
if event == '_CAPTURE_FILE_LOAD_':
fileHandler = open(values['_CAPTURE_FILE_'], "rb") | random_line_split |
analyze_sample_captures.py | baseband-sig-canvas-')],
[sg.Text("Extracted Frames: ")],
[sg.Multiline(size=(140, 10), key='--CONSOLE_TEXT--', autoscroll=True)]
]
data = None
layout = [
[sg.Text("Capture File: "), sg.Input(key='_CAPTURE_FILE_'), sg.FilesBrowse(), sg.Button("Load", key='_CAPTURE_FILE_LOAD_')],
[sg.Multiline(size=(140, 7), key='--SETTINGS_TEXT--', autoscroll=True)],
[sg.Text('Capture Number:'), sg.Combo(list(range(0, 3000)), key='--FRAME_NUM_VAL--'), sg.Button('Analyze', key='--FRAME_NUM--'), sg.Text('Valid capture indexs:'), sg.Combo(list(range(0, 3000)), key='--Valid-capture-indexs--' ) ],
[sg.TabGroup(
[
[sg.Tab('Raw Samples', raw_samples_layout)],
[sg.Tab('Low Pass filtered samples', filtered_samples_layout)],
[sg.Tab('Time Sync Signal', time_sync_layoyt)],
[sg.Tab('Time Sync Signal - Constellation', time_sync_const)],
[sg.Tab('Base Band Signal', baseband_sig_layout)]
]
)],
]
def plot_samples(frame_index):
# Raw Captures
raw_samples_fig = plt.figure(figsize=(10, 5), dpi=100)
raw_axis_time = raw_samples_fig.add_subplot(2, 1, 1)
raw_axis_freq = raw_samples_fig.add_subplot(2, 1, 2)
raw_samples_fig.subplots_adjust(left=0.1, right=0.98, top=0.99, bottom=0.1)
'''
raw Time signal
'''
raw_samples = np.array(data['raw'][frame_index]).flatten()
total_capture_time = raw_samples.shape[0]/data['sdr_settings']['sample_rate']
time = np.linspace(0, total_capture_time, num=raw_samples.shape[0])
raw_axis_time.plot(time, np.abs(raw_samples))
raw_axis_time.set_xlabel("Time (seconds)")
raw_axis_time.set_ylabel("Amplitude (V)")
raw_axis_time.grid()
'''
raw PSD signal
'''
raw_axis_freq.clear()
raw_axis_freq.psd(raw_samples, NFFT=raw_samples.shape[0], Fs=data['sdr_settings']['sample_rate']/1e6, Fc=data['sdr_settings']['center_freq']/1e6)
raw_axis_freq.set_xlabel("Freq (MHz)")
raw_axis_freq.set_ylabel("PSD (dB)")
# Time Sync
time_sync_fig = plt.figure(figsize=(10, 5), dpi=100)
time_sync_axis = time_sync_fig.add_subplot(2, 1, 1)
time_sync_raw_axis = time_sync_fig.add_subplot(2, 1, 2)
time_sync_fig.subplots_adjust(left=0.1, right=0.98, top=0.99, bottom=0.1)
time_sync_samples = np.array(data['time_sync'][frame_index]).flatten()
time = np.linspace(0, total_capture_time, num=time_sync_samples.shape[0])
time_sync_axis.plot(time, np.abs(time_sync_samples), color='orange', label="time sync sig")
time_sync_axis.plot(time, np.abs(time_sync_samples), 'x', color='blue',label="time sync sig (sampled)")
time_sync_axis.set_xlabel("Time (seconds)")
time_sync_axis.set_ylabel("Amplitude (V)")
time_sync_axis.grid()
time_sync_axis.legend()
# Raw signal
time = np.linspace(0, total_capture_time, num=raw_samples.shape[0])
time_sync_raw_axis.plot(time, np.abs(raw_samples))
#time_sync_raw_axis.plot(time, np.abs(time_sync_samples), 'x', color='red',)
time_sync_raw_axis.set_xlabel("Time (seconds)")
time_sync_raw_axis.set_ylabel("Amplitude (v)")
time_sync_raw_axis.grid()
# Time Sync Constellation
time_sync_const_fig = plt.figure(figsize=(10, 5), dpi=100)
time_sync_const_axis = time_sync_const_fig.add_subplot(1, 1, 1)
time_sync_const_fig.subplots_adjust(left=0.1, right=0.98, top=0.99, bottom=0.1)
y = np.zeros(time_sync_samples.shape[0])
time_sync_const_axis.scatter(np.abs(time_sync_samples), y,)
time_sync_const_axis.grid()
time_sync_const_axis.set_xlim([-2.5, 2.5])
time_sync_const_axis.set_ylim([-2.5, 2.5])
time_sync_const_axis.set_xlabel("In-Phase")
time_sync_const_axis.set_ylabel("Quadrature")
# Base Band Signal
baseband_sig_fig = plt.figure(figsize=(10, 3), dpi=100)
demod_signal_axis = baseband_sig_fig.add_subplot(1, 1, 1)
baseband_sig_fig.subplots_adjust(left=0.1, right=0.98, top=0.99, bottom=0.1)
baseband_sig_samples = np.array(data['ask_demod'][frame_index]).flatten()
bar1 = demod_signal_axis.bar(np.arange(0, baseband_sig_samples.shape[0]), baseband_sig_samples, color='white', edgecolor='black', label='BaseBand Signal (Bits)')
demod_signal_axis.set_ylim([0, 1.7])
#demod_signal_axis.get_xaxis().set_visible(False)
for rect in bar1:
height = rect.get_height()
demod_signal_axis.text(rect.get_x() + rect.get_width()/2.0, height, '%d' % int(height), ha='center', va='bottom')
nrz_signal = np.array(data['ask_demod'][frame_index]).flatten()
sig_match_indexs = search_sequence_cv2(nrz_signal, np.array([1, 1, 1, 0, 1]))
corr_sig = np.zeros(nrz_signal.shape)
for index in sig_match_indexs:
corr_sig[index] = 1
demod_signal_axis.plot(corr_sig, label="Signal correlation with preamble")
demod_signal_axis.legend()
demod_signal_axis.grid()
return (raw_samples_fig, time_sync_fig, time_sync_const_fig, baseband_sig_fig)
def delete_fig_agg(fig_agg):
fig_agg.get_tk_widget().forget()
plt.close('all')
def | (canvas, figure):
figure_canvas_agg = FigureCanvasTkAgg(figure, canvas)
figure_canvas_agg.draw()
figure_canvas_agg.get_tk_widget().pack(side='top', fill='both', expand=1)
return figure_canvas_agg
window = sg.Window('Sample Captures Analyzer', layout, default_element_size=(40,40), finalize=True)
frames_detected_capture_indxs = []
if __name__ == '__main__':
raw_samples_fig = None
time_sync_fig = None
time_sync_const_fig = None
baseband_sig_fig = None
while True:
event, values = window.read()
if event == '--FRAME_NUM--' or event == '_CAPTURE_FILE_LOAD_':
if event == '_CAPTURE_FILE_LOAD_':
fileHandler = open(values['_CAPTURE_FILE_'], "rb")
data = pickle.load(fileHandler)
fileHandler.close()
# Find capture indexs where the frames were detected.
for capture_idx, frames in enumerate(data['frame_detection']):
if len(frames) > 0:
frames_detected_capture_indxs.append(capture_idx)
window['--Valid-capture-indexs--'].update(values=frames_detected_capture_indxs)
device_info_string = "Capture time stamp: %s.\n"%(data['cature_time_stamp'])
device_info_string += "Downlink performance => FER: %.3f, Frames detected: %d, Failed Frames: %d. \n"%(data['rx_performance_metrics']['fer'],
data['rx_performance_metrics']['frames_detected'],
data['rx_performance_metrics']['failed_frames'])
device_info_string += 'SDR Settings => Sample rate: %d, Center freq :%d, Gain: %s, Freq Corr: %d, Capture Len: %d.'%(data['sdr_settings']['sample_rate'],
data['sdr_settings']['center_freq'],
data['sdr_settings']['gain'],
data['sdr_settings']['freq_corr'],
len(data['raw'][0]))
device_info_string += '\nBaseband Singal Info => Symbol duration %.6f seconds.'%(data['sdr_settings']['symbol_dur'])
device_info_string += '\nTime Sync Block Info => Alpha: %.2f.'%(data['sdr_settings']['alpha'])
device_info_string += '\nASK Demod Block Info => Thershold: %.2f.'%(data['sdr_settings']['decision_thershold'])
device_info_string += '\nFrame Detector Block Info => Barker seq: %d, CRC Polynomial: %d.'%(data['sdr_settings']['barker_seq'],
data['sdr_settings']['crc_polynomial'])
window['--SETTINGS_TEXT--'].update(device_info_string)
window['--FRAME_NUM_VAL--'].update(values=list(range( | draw_figure | identifier_name |
import sys
import re

import numpy as np


def help():
print """usage: app2mosaic_full_display file
Arguments:
-d id [-point-size ps] Choose dataset (and plot point size)
-node [-inter-node-only] Display node-to-node mosaic
-node-summary Display summary of off/on-node communications
-display-output file Filename used to output plot image
file ap2 xml input file
Description
This script parses output from perftools to obtain the P2P communication data.
The input file is generated via pat_report.
pat_report -f ap2-xml experiment.ap2
You could then run this script on the output.
app2mosaic_full_display -d 0 experiment.ap2-xml
A rank-to-rank mosaic is presented for the dataset indicated by "-d 0".
A node-to-node display is shown if the "-node" argument is present.
The "-node" argument can be accompanied by the "-inter-node-only" qualifier,
which means the plot will only show inter node comms data and exclude any
communications between PEs on the same node.
The "-node-summary" argument displays a stacked bar plot for all the different
datasets that shows which proportion of the communications was on/off-node.
"""
def parse_args():
global ifn
global idata
global node2node
global nodesummary
global internodeonly
global display_output
global user_pt_size
# Use a while loop so that value arguments are consumed and not re-parsed:
# inside a "for i in range(...)" loop, reassigning i does not skip the next
# iteration, so "-d 0" would also have stored "0" as the input file name.
i = 1
while i < len(sys.argv):
arg = sys.argv[i]
if arg == "-d":
i += 1
idata = int(sys.argv[i])
elif arg == "-point-size":
i += 1
user_pt_size = float(sys.argv[i])
elif arg == "-node":
node2node = True
if i + 1 < len(sys.argv) and sys.argv[i + 1].lower() == "-inter-node-only":
internodeonly = True
i += 1
elif arg == "-node-summary":
node2node = True
nodesummary = True
elif arg == "-display-output":
i += 1
display_output = sys.argv[i]
elif arg == "-h" or arg == "-help":
help()
sys.exit()
else:
ifn = arg
i += 1
return
def parse_pat_data():
global ifn
global nranks, nnodes
global pe_node_xyz
global pe_node_id
global node_ids
global data_dict
re_pes = re.compile(" pes='(\d+)'")
re_data_table = re.compile("<data_table name='(\S+P2P\S+)'")
re_coll_data_table = re.compile("<data_table name='(\S+Coll\S+)'")
re_data_table_end = re.compile("</data_table")
re_data_tag = re.compile(".*data")
re_data = re.compile("<data ctidx='(\d+)' first_pe='(\d+)'")
re_data_end = re.compile("</data")
re_pdata_node_id = re.compile("<pdata name='node_id' first_pe='(\d+)'")
re_pdata_mesh_xyz = re.compile("<pdata name='mesh_xyz' first_pe='(\d+)'")
re_pdata_end = re.compile("</pdata")
inside_P2P_table = False
data_label = ""
with open(ifn) as fin:
for line in fin:
if "cpat_version" in line:
pat_version_string = line[line.find("'")+1:-2]
print pat_version_string
m = re_pes.match(line)
if m:
nranks = int(m.group(1))
data_2d = np.zeros((nranks,nranks))
continue
m = re_data_table.match(line)
if m:
inside_P2P_table = True
data_label = m.group(1)
"""
m = re_coll_data_table.match(line)
if m:
inside_P2P_table = True
data_label = m.group(1)
"""
# Get mesh_xyz
# pdata name='mesh_xyz' first_pe='<pe>'
# Aries (X optical, Y group, Z backplane)
# num values (or *n values)
# values are of form 0xXXXXYYYYZZZZ no leading zeros
# So 0x200030002 is 2,3,2
m = re_pdata_mesh_xyz.match(line)
if m:
first_pe = int(m.group(1))
j = first_pe
row_data = []
for line in fin:
if re_pdata_end.match(line):
break
else:
row_data += line.rstrip().split(' ')
repeat = 1
for val in row_data[1::]:
if val[0] == '*':
repeat = int(val[1:])
continue
v = int(val,16)
for ij in range(repeat):
pe_node_xyz[j] = v
j += 1
repeat = 1
# Get pdata node_id info for node mapping
# Put if nodal around following
m = re_pdata_node_id.match(line)
if m:
first_pe = int(m.group(1))
j = first_pe
# line+next rows have the actual data
# npes then data per pe with possible *n v repeat counts
# note the *n v can be split over line boundaries
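# Worked example: a row "4 *3 7 9" means 4 PEs, with the run-length pair
# "*3 7" expanding to three 7s, so the per-PE values become [7, 7, 7, 9].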
row_data = []
for line in fin:
if re_pdata_end.match(line):
break
else:
row_data += line.rstrip().split(' ')
repeat = 1
for val in row_data[1::]:
if val[0] == '*':
repeat = int(val[1:])
continue
v = int(val)
for ij in range(repeat):
pe_node_id[j] = v
if v not in node_ids:
node_ids.append(v)
j += 1
repeat = 1
if re_data_table_end.match(line):
if len(data_label) > 0:
data_dict[data_label] = data_2d
data_2d = np.zeros((nranks,nranks))
data_label = ""
inside_P2P_table = False
if inside_P2P_table:
if re_data_tag.match(line):
m = re_data.match(line)
if m:
pe_idx = int(m.group(1))
j = int(m.group(2))
else:
# line+next rows have the actual data
# npes then data per pe with possible *n v repeat counts
row_data = line.rstrip().split(' ')
for line in fin:
if re_data_end.match(line):
break
else:
row_data += line.rstrip().split(' ')
repeat = 1
for val in row_data[1::]:
if val[0] == '*':
repeat = int(val[1:])
continue
v = float(val)
for ij in range(repeat):
data_2d[pe_idx,j] = v
j += 1
repeat = 1
nnodes = len(node_ids)
return
def dist_xyz(a, b):
dx = abs ( a/0x100000000 - b/0x100000000 )
dy = abs ( a/0x10000%0x10000 -b/0x10000%0x10000 )
dz = abs ( a%0x10000 - b%0x10000 )
return dx, dy, dz
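# Worked example: a = 0x000200030002 and b = 0x000200010005 decode to
# (x, y, z) = (2, 3, 2) and (2, 1, 5), so dist_xyz(a, b) returns (0, 2, 3).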
def calculate_network_distances():
global nranks
global pe_node_xyz
global pe_node_id
global data_dict
data_2d = np.zeros((nranks,nranks))
counts_2d = data_dict["Data_P2P_Count"]
for i in range(nranks):
for j in range(nranks):
if counts_2d[i,j] == 0:
continue
dx,dy,dz = dist_xyz(pe_node_xyz[i], pe_node_xyz[j])
if dx > 0:
# pes communicate over rank 3 network (optical)
data_2d[i,j] = 4
elif dy > 0:
# pes communicate over rank 2 network (copper/group)
data_2d[i,j] = 3
elif dz > 0:
# pes communicate over rank 1 network (backplane)
data_2d[i,j] = 2
elif pe_node_id[i] != pe_node_id[j]:
# pes are on same blade
data_2d[i,j] = 2
else:
# pes are on same node
data_2d[i,j] = 1
data_dict["Data_P2P_NetworkDist"] = data_2d
def collate_node_data():
global nranks, nnodes
global pe_node_xyz
global pe_node_id
global data_dict
global node_data_dict
global node_summary_dict
node_data_dict = {}
for key in data_dict:
n2n_data = np.zeros((nnodes,nnodes))
p2p_data = | help | identifier_name |
|
app2mosaic_full_display.py | = int(m.group(1))
data_2d = np.zeros((nranks,nranks))
continue
m = re_data_table.match(line)
if m:
inside_P2P_table = True
data_label = m.group(1)
"""
m = re_coll_data_table.match(line)
if m:
inside_P2P_table = True
data_label = m.group(1)
"""
# Get mesh_xyz
# pdata name='mesh_xyz' first_pe='<pe>'
# Aries (X optical, Y group, Z backplane)
# num values (or *n values)
# values are of form 0xXXXXYYYYZZZZ no leading zeros
# So 0x200030002 is 2,3,2
m = re_pdata_mesh_xyz.match(line)
if m:
first_pe = int(m.group(1))
j = first_pe
row_data = []
for line in fin:
if re_pdata_end.match(line):
break
else:
row_data += line.rstrip().split(' ')
repeat = 1
for val in row_data[1::]:
if val[0] == '*':
repeat = int(val[1:])
continue
v = int(val,16)
for ij in range(repeat):
pe_node_xyz[j] = v
j += 1
repeat = 1
# Get pdata node_id info for node mapping
# Put if nodal around following
m = re_pdata_node_id.match(line)
if m:
first_pe = int(m.group(1))
j = first_pe
# line+next rows have the actual data
# npes then data per pe with possible *n v repeat counts
# note the *n v can be split over line boundaries
row_data = []
for line in fin:
if re_pdata_end.match(line):
break
else:
row_data += line.rstrip().split(' ')
repeat = 1
for val in row_data[1::]:
if val[0] == '*':
repeat = int(val[1:])
continue
v = int(val)
for ij in range(repeat):
pe_node_id[j] = v
if v not in node_ids:
node_ids.append(v)
j += 1
repeat = 1
if re_data_table_end.match(line):
if len(data_label) > 0:
data_dict[data_label] = data_2d
data_2d = np.zeros((nranks,nranks))
data_label = ""
inside_P2P_table = False
if inside_P2P_table:
if re_data_tag.match(line):
m = re_data.match(line)
if m:
pe_idx = int(m.group(1))
j = int(m.group(2))
else:
# line+next rows have the actual data
# npes then data per pe with possible *n v repeat counts
row_data = line.rstrip().split(' ')
for line in fin:
if re_data_end.match(line):
break
else:
row_data += line.rstrip().split(' ')
repeat = 1
for val in row_data[1::]:
if val[0] == '*':
repeat = int(val[1:])
continue
v = float(val)
for ij in range(repeat):
data_2d[pe_idx,j] = v
j += 1
repeat = 1
nnodes = len(node_ids)
return
def dist_xyz(a, b):
dx = abs ( a/0x100000000 - b/0x100000000 )
dy = abs ( a/0x10000%0x10000 -b/0x10000%0x10000 )
dz = abs ( a%0x10000 - b%0x10000 )
return dx, dy, dz | def calculate_network_distances():
global nranks
global pe_node_xyz
global pe_node_id
global data_dict
data_2d = np.zeros((nranks,nranks))
counts_2d = data_dict["Data_P2P_Count"]
for i in range(nranks):
for j in range(nranks):
if counts_2d[i,j] == 0:
continue
dx,dy,dz = dist_xyz(pe_node_xyz[i], pe_node_xyz[j])
if dx > 0:
# pes communicate over rank 3 network (optical)
data_2d[i,j] = 4
elif dy > 0:
# pes communicate over rank 2 network (copper/group)
data_2d[i,j] = 3
elif dz > 0:
# pes communicate over rank 1 network (backplane)
data_2d[i,j] = 2
elif pe_node_id[i] != pe_node_id[j]:
# pes are on same blade
data_2d[i,j] = 2
else:
# pes are on same node
data_2d[i,j] = 1
data_dict["Data_P2P_NetworkDist"] = data_2d
def collate_node_data():
global nranks, nnodes
global pe_node_xyz
global pe_node_id
global data_dict
global node_data_dict
global node_summary_dict
node_data_dict = {}
for key in data_dict:
n2n_data = np.zeros((nnodes,nnodes))
p2p_data = data_dict[key]
min_key = "min" in key.lower()
max_key = "max" in key.lower()
network_key = "network" in key.lower()
for i in range(nranks):
for j in range(nranks):
if 0 == p2p_data[i,j]:
continue
inode = pe_node_id[i]
jnode = pe_node_id[j]
if inode==jnode and internodeonly:
continue
ind = node_ids.index(inode)
jnd = node_ids.index(jnode)
if min_key:
if 0 == n2n_data[ind, jnd]:
n2n_data[ind, jnd] = p2p_data[i,j]
elif p2p_data[i,j] < n2n_data[ind, jnd]:
n2n_data[ind, jnd] = p2p_data[i,j]
elif max_key:
if p2p_data[i,j] > n2n_data[ind, jnd]:
n2n_data[ind, jnd] = p2p_data[i,j]
elif network_key:
n2n_data[ind, jnd] = p2p_data[i,j]
else:
n2n_data[ind, jnd] += p2p_data[i,j]
node_data_dict[key] = n2n_data
def get_simple_data_label(label):
global data_label_prefix
return label[len(data_label_prefix):]
def get_snappy_data_label(label):
global data_label_prefix
lb = label[len(data_label_prefix):]
if lb == "SendTime":
lb = "Send Time"
elif lb == "SendTimeMax":
lb = "Maximum Send Time"
elif lb == "SendTimeMin":
lb = "Minimum Send Time"
elif lb == "RecvTime":
lb = "Receive Time"
elif lb == "RecvTimeMax":
lb = "Maximum Receive Time"
elif lb == "RecvTimeMin":
lb = "Minimum Receive Time"
elif lb == "NetworkDist":
lb = "Network Separation"
return lb
def prepare_plot_data(label):
global nranks, nnodes
global node2node
global data_dict, node_data_dict
global data_label
global data_2d
global data_unit_label
global data_min, data_max
global data_all
data_label = data_label_prefix + label
if node2node:
data_2d = node_data_dict[data_label]
else:
data_2d = data_dict[data_label]
data_unit_label = ""
if "time" in data_label.lower():
# convert times from nanoseconds to seconds
data_2d /= 1.0e9
data_unit_label = "s"
elif "bytes" in data_label.lower():
# convert bytes to megabytes
data_2d /= 1.0e6
data_unit_label = "MB"
data_1d = data_2d.flatten()
data_min = np.min(data_1d)
data_max = np.max(data_1d)
data_all = {}
data_all["cnt"] = len(data_1d[data_1d>0.0])
data_all["xpts"] = np.zeros(data_all["cnt"])
data_all["ypts"] = np.zeros(data_all["cnt"])
data_all["vals"] = np.zeros(data_all["cnt"])
nitems = nnodes if node2node else nranks
k = 0
| random_line_split |
|
app2mosaic_full_display.py | = int(m.group(1))
data_2d = np.zeros((nranks,nranks))
continue
m = re_data_table.match(line)
if m:
inside_P2P_table = True
data_label = m.group(1)
"""
m = re_coll_data_table.match(line)
if m:
inside_P2P_table = True
data_label = m.group(1)
"""
# Get mesh_xyz
# pdata name='mesh_xyz' first_pe='<pe>'
# Aries (X optical, Y group, Z backplane)
# num values (or *n values)
# values are of form 0xXXXXYYYYZZZZ no leading zeros
# So 0x200030002 is 2,3,2
m = re_pdata_mesh_xyz.match(line)
if m:
first_pe = int(m.group(1))
j = first_pe
row_data = []
for line in fin:
if re_pdata_end.match(line):
break
else:
row_data += line.rstrip().split(' ')
repeat = 1
for val in row_data[1::]:
if val[0] == '*':
repeat = int(val[1:])
continue
v = int(val,16)
for ij in range(repeat):
pe_node_xyz[j] = v
j += 1
repeat = 1
# Get pdata node_id info for node mapping
# Put if nodal around following
m = re_pdata_node_id.match(line)
if m:
first_pe = int(m.group(1))
j = first_pe
# line+next rows have the actual data
# npes then data per pe with possible *n v repeat counts
# note the *n v can be split over line boundaries
row_data = []
for line in fin:
if re_pdata_end.match(line):
break
else:
|
repeat = 1
for val in row_data[1::]:
if val[0] == '*':
repeat = int(val[1:])
continue
v = int(val)
for ij in range(repeat):
pe_node_id[j] = v
if v not in node_ids:
node_ids.append(v)
j += 1
repeat = 1
if re_data_table_end.match(line):
if len(data_label) > 0:
data_dict[data_label] = data_2d
data_2d = np.zeros((nranks,nranks))
data_label = ""
inside_P2P_table = False
if inside_P2P_table:
if re_data_tag.match(line):
m = re_data.match(line)
if m:
pe_idx = int(m.group(1))
j = int(m.group(2))
else:
# line+next rows have the actual data
# npes then data per pe with possible *n v repeat counts
row_data = line.rstrip().split(' ')
for line in fin:
if re_data_end.match(line):
break
else:
row_data += line.rstrip().split(' ')
repeat = 1
for val in row_data[1::]:
if val[0] == '*':
repeat = int(val[1:])
continue
v = float(val)
for ij in range(repeat):
data_2d[pe_idx,j] = v
j += 1
repeat = 1
nnodes = len(node_ids)
return
def dist_xyz(a, b):
dx = abs ( a/0x100000000 - b/0x100000000 )
dy = abs ( a/0x10000%0x10000 -b/0x10000%0x10000 )
dz = abs ( a%0x10000 - b%0x10000 )
return dx, dy, dz
def calculate_network_distances():
global nranks
global pe_node_xyz
global pe_node_id
global data_dict
data_2d = np.zeros((nranks,nranks))
counts_2d = data_dict["Data_P2P_Count"]
for i in range(nranks):
for j in range(nranks):
if counts_2d[i,j] == 0:
continue
dx,dy,dz = dist_xyz(pe_node_xyz[i], pe_node_xyz[j])
if dx > 0:
# pes communicate over rank 3 network (optical)
data_2d[i,j] = 4
elif dy > 0:
# pes communicate over rank 2 network (copper/group)
data_2d[i,j] = 3
elif dz > 0:
# pes communicate over rank 1 network (backplane)
data_2d[i,j] = 2
elif pe_node_id[i] != pe_node_id[j]:
# pes are on same blade
data_2d[i,j] = 2
else:
# pes are on same node
data_2d[i,j] = 1
data_dict["Data_P2P_NetworkDist"] = data_2d
def collate_node_data():
global nranks, nnodes
global pe_node_xyz
global pe_node_id
global data_dict
global node_data_dict
global node_summary_dict
node_data_dict = {}
for key in data_dict:
n2n_data = np.zeros((nnodes,nnodes))
p2p_data = data_dict[key]
min_key = "min" in key.lower()
max_key = "max" in key.lower()
network_key = "network" in key.lower()
for i in range(nranks):
for j in range(nranks):
if 0 == p2p_data[i,j]:
continue
inode = pe_node_id[i]
jnode = pe_node_id[j]
if inode==jnode and internodeonly:
continue
ind = node_ids.index(inode)
jnd = node_ids.index(jnode)
if min_key:
if 0 == n2n_data[ind, jnd]:
n2n_data[ind, jnd] = p2p_data[i,j]
elif p2p_data[i,j] < n2n_data[ind, jnd]:
n2n_data[ind, jnd] = p2p_data[i,j]
elif max_key:
if p2p_data[i,j] > n2n_data[ind, jnd]:
n2n_data[ind, jnd] = p2p_data[i,j]
elif network_key:
n2n_data[ind, jnd] = p2p_data[i,j]
else:
n2n_data[ind, jnd] += p2p_data[i,j]
node_data_dict[key] = n2n_data
def get_simple_data_label(label):
global data_label_prefix
return label[len(data_label_prefix):]
def get_snappy_data_label(label):
global data_label_prefix
lb = label[len(data_label_prefix):]
if lb == "SendTime":
lb = "Send Time"
elif lb == "SendTimeMax":
lb = "Maximum Send Time"
elif lb == "SendTimeMin":
lb = "Minimum Send Time"
elif lb == "RecvTime":
lb = "Receive Time"
elif lb == "RecvTimeMax":
lb = "Maximum Receive Time"
elif lb == "RecvTimeMin":
lb = "Minimum Receive Time"
elif lb == "NetworkDist":
lb = "Network Separation"
return lb
def prepare_plot_data(label):
global nranks, nnodes
global node2node
global data_dict, node_data_dict
global data_label
global data_2d
global data_unit_label
global data_min, data_max
global data_all
data_label = data_label_prefix + label
if node2node:
data_2d = node_data_dict[data_label]
else:
data_2d = data_dict[data_label]
data_unit_label = ""
if "time" in data_label.lower():
# convert times from nanoseconds to seconds
data_2d /= 1.0e9
data_unit_label = "s"
elif "bytes" in data_label.lower():
# convert bytes to megabytes
data_2d /= 1.0e6
data_unit_label = "MB"
data_1d = data_2d.flatten()
data_min = np.min(data_1d)
data_max = np.max(data_1d)
data_all = {}
data_all["cnt"] = len(data_1d[data_1d>0.0])
data_all["xpts"] = np.zeros(data_all["cnt"])
data_all["ypts"] = np.zeros(data_all["cnt"])
data_all["vals"] = np.zeros(data_all["cnt"])
nitems = nnodes if node2node else nranks
k = 0 | row_data += line.rstrip().split(' ') | conditional_block |
app2mosaic_full_display.py | = int(m.group(1))
data_2d = np.zeros((nranks,nranks))
continue
m = re_data_table.match(line)
if m:
inside_P2P_table = True
data_label = m.group(1)
"""
m = re_coll_data_table.match(line)
if m:
inside_P2P_table = True
data_label = m.group(1)
"""
# Get mesh_xyz
# pdata name='mesh_xyz' first_pe='<pe>'
# Aries (X optical, Y group, Z backplane)
# num values (or *n values)
# values are of form 0xXXXXYYYYZZZZ no leading zeros
# So 0x200030002 is 2,3,2
m = re_pdata_mesh_xyz.match(line)
if m:
first_pe = int(m.group(1))
j = first_pe
row_data = []
for line in fin:
if re_pdata_end.match(line):
break
else:
row_data += line.rstrip().split(' ')
repeat = 1
for val in row_data[1::]:
if val[0] == '*':
repeat = int(val[1:])
continue
v = int(val,16)
for ij in range(repeat):
pe_node_xyz[j] = v
j += 1
repeat = 1
# Get pdata node_id info for node mapping
# Put if nodal around following
m = re_pdata_node_id.match(line)
if m:
first_pe = int(m.group(1))
j = first_pe
# line+next rows have the actual data
# npes then data per pe with possible *n v repeat counts
# note the *n v can be split over line boundaries
row_data = []
for line in fin:
if re_pdata_end.match(line):
break
else:
row_data += line.rstrip().split(' ')
repeat = 1
for val in row_data[1::]:
if val[0] == '*':
repeat = int(val[1:])
continue
v = int(val)
for ij in range(repeat):
pe_node_id[j] = v
if v not in node_ids:
node_ids.append(v)
j += 1
repeat = 1
if re_data_table_end.match(line):
if len(data_label) > 0:
data_dict[data_label] = data_2d
data_2d = np.zeros((nranks,nranks))
data_label = ""
inside_P2P_table = False
if inside_P2P_table:
if re_data_tag.match(line):
m = re_data.match(line)
if m:
pe_idx = int(m.group(1))
j = int(m.group(2))
else:
# line+next rows have the actual data
# npes then data per pe with possible *n v repeat counts
row_data = line.rstrip().split(' ')
for line in fin:
if re_data_end.match(line):
break
else:
row_data += line.rstrip().split(' ')
repeat = 1
for val in row_data[1::]:
if val[0] == '*':
repeat = int(val[1:])
continue
v = float(val)
for ij in range(repeat):
data_2d[pe_idx,j] = v
j += 1
repeat = 1
nnodes = len(node_ids)
return
def dist_xyz(a, b):
dx = abs ( a/0x100000000 - b/0x100000000 )
dy = abs ( a/0x10000%0x10000 -b/0x10000%0x10000 )
dz = abs ( a%0x10000 - b%0x10000 )
return dx, dy, dz
def calculate_network_distances():
| data_2d[i,j] = 3
elif dz > 0:
# pes communicate over rank 1 network (backplane)
data_2d[i,j] = 2
elif pe_node_id[i] != pe_node_id[j]:
# pes are on same blade
data_2d[i,j] = 2
else:
# pes are on same node
data_2d[i,j] = 1
data_dict["Data_P2P_NetworkDist"] = data_2d
def collate_node_data():
global nranks, nnodes
global pe_node_xyz
global pe_node_id
global data_dict
global node_data_dict
global node_summary_dict
node_data_dict = {}
for key in data_dict:
n2n_data = np.zeros((nnodes,nnodes))
p2p_data = data_dict[key]
min_key = "min" in key.lower()
max_key = "max" in key.lower()
network_key = "network" in key.lower()
for i in range(nranks):
for j in range(nranks):
if 0 == p2p_data[i,j]:
continue
inode = pe_node_id[i]
jnode = pe_node_id[j]
if inode==jnode and internodeonly:
continue
ind = node_ids.index(inode)
jnd = node_ids.index(jnode)
if min_key:
if 0 == n2n_data[ind, jnd]:
n2n_data[ind, jnd] = p2p_data[i,j]
elif p2p_data[i,j] < n2n_data[ind, jnd]:
n2n_data[ind, jnd] = p2p_data[i,j]
elif max_key:
if p2p_data[i,j] > n2n_data[ind, jnd]:
n2n_data[ind, jnd] = p2p_data[i,j]
elif network_key:
n2n_data[ind, jnd] = p2p_data[i,j]
else:
n2n_data[ind, jnd] += p2p_data[i,j]
node_data_dict[key] = n2n_data
def get_simple_data_label(label):
global data_label_prefix
return label[len(data_label_prefix):]
def get_snappy_data_label(label):
global data_label_prefix
lb = label[len(data_label_prefix):]
if lb == "SendTime":
lb = "Send Time"
elif lb == "SendTimeMax":
lb = "Maximum Send Time"
elif lb == "SendTimeMin":
lb = "Minimum Send Time"
elif lb == "RecvTime":
lb = "Receive Time"
elif lb == "RecvTimeMax":
lb = "Maximum Receive Time"
elif lb == "RecvTimeMin":
lb = "Minimum Receive Time"
elif lb == "NetworkDist":
lb = "Network Separation"
return lb
def prepare_plot_data(label):
global nranks, nnodes
global node2node
global data_dict, node_data_dict
global data_label
global data_2d
global data_unit_label
global data_min, data_max
global data_all
data_label = data_label_prefix + label
if node2node:
data_2d = node_data_dict[data_label]
else:
data_2d = data_dict[data_label]
data_unit_label = ""
if "time" in data_label.lower():
# convert times from nanoseconds to seconds
data_2d /= 1.0e9
data_unit_label = "s"
elif "bytes" in data_label.lower():
# convert bytes to megabytes
data_2d /= 1.0e6
data_unit_label = "MB"
data_1d = data_2d.flatten()
data_min = np.min(data_1d)
data_max = np.max(data_1d)
data_all = {}
data_all["cnt"] = len(data_1d[data_1d>0.0])
data_all["xpts"] = np.zeros(data_all["cnt"])
data_all["ypts"] = np.zeros(data_all["cnt"])
data_all["vals"] = np.zeros(data_all["cnt"])
nitems = nnodes if node2node else nranks
k = 0
| global nranks
global pe_node_xyz
global pe_node_id
global data_dict
data_2d = np.zeros((nranks,nranks))
counts_2d = data_dict["Data_P2P_Count"]
for i in range(nranks):
for j in range(nranks):
if counts_2d[i,j] == 0:
continue
dx,dy,dz = dist_xyz(pe_node_xyz[i], pe_node_xyz[j])
if dx > 0:
# pes communicate over rank 3 network (optical)
data_2d[i,j] = 4
elif dy > 0:
# pes communicate over rank 2 network (copper/group) | identifier_body |
main.rs | )]
#![recursion_limit="512"]
// starting doc {{{
//! ARES: Automatic REcord System.
//!
//! A Kubernetes-native system to automatically create and manage DNS records
//! meant to run in parallel with External DNS.
//!
//! Configuration is managed through the ares-secret Secret, typically in the
//! default namespace. This may change in the future to default to the
//! namespace that ARES is deployed in.
//!
//! ## Configuration
//!
//! A configuration file should look like this:
//!
//! ```yaml
//! - selector:
//! - syntixi.io
//! provider: cloudflare
//! providerOptions:
//! apiToken: ***
//! ```
//!
//! The corresponding Secret can look like:
//!
//! ```yaml
//! apiVersion: v1
//! kind: Secret
//! metadata:
//! name: ares-secret
//! stringData:
//! ares.yaml: |-
//! - selector:
//! - syntixi.io
//! provider: cloudflare
//! providerOptions:
//! apiToken: ***
//! ```
//!
//! If you want to control multiple domain zones across multiple different
//! providers, you can add another element into the default array and
//! configure another provider there. You can configure multiple domain zones
//! through a single provider.
//!
//! ## Custom Resource Definitions
//!
//! ARES watches over the syntixi.io/v1alpha1/Record CustomResourceDefinition
//! to know which domain names to add, remove, or modify. An example resource
//! is below.
//!
//! ```yaml
//! apiVersion: syntixi.io/v1alpha1
//! kind: Record
//! metadata:
//! name: example
//! spec:
//! fqdn: example.syntixi.io
//! ttl: 100
//! type: CNAME
//! value:
//! - syntixi.io
//! ```
//!
//! For addresses that can change, such as Nodes that Pods may be running on,
//! it is recommended to instead use a valueFrom selector, such as the
//! PodSelector. The example below includes a Pod and a Record that points to
//! the Node the Pod is running on, with a Selector similar to that in the
//! Kubernetes
//! [documentation](https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/).
//!
//! This should not be used for inbound traffic (for that, you should use a
//! LoadBalancer Service or an Ingress record, with external-dns). This is,
//! however, useful for making SPF records point to an outbound mail record,
//! where the mail can be sent from one of many Nodes.
//!
//! ```yaml
//! apiVersion: v1
//! kind: Pod
//! metadata:
//! name: nginx-hello-world
//! labels:
//! app: nginx
//! spec:
//! containers:
//! - name: nginx
//! image: nginxdemos/hello
//! ---
//! apiVersion: syntixi.io/v1alpha1
//! kind: Record
//! metadata:
//! name: example-selector
//! spec:
//! fqdn: selector.syntixi.io
//! ttl: 1
//! valueFrom:
//! podSelector:
//! matchLabels:
//! app: nginx
//! ```
//!
//! When a syntixi.io/v1alpha1/Record resource is created, an additional record
//! is made for ARES to track ownership over the DNS record. So long as that
//! tracking record exists, when the Kubernetes resource is deleted, the
//! corresponding record and tracking record will be deleted.
// }}}
// imports {{{
use clap::Clap;
use std::ops::Deref;
use std::sync::Arc;
use slog::{
crit, debug, error, info, log, o,
Drain, Logger,
};
use anyhow::{anyhow, Result};
use futures::{
StreamExt, TryStreamExt, select,
future::{Future, Fuse, join_all},
};
use k8s_openapi::api::core::v1::{Event, Secret};
use kube::{
api::{Api, ListParams, Meta, WatchEvent},
Client,
};
use kube_runtime::{utils::try_flatten_applied, watcher};
use kube_derive::{CustomResource};
mod cli;
mod xpathable;
mod providers;
mod program_config;
mod record_spec;
use program_config::AresConfig;
use providers::{ProviderConfig, util::{ProviderBackend, ZoneDomainName,
RecordType, Record as RecordObject}};
use record_spec::{Record, RecordValueCollector};
// }}}
#[tokio::main]
async fn | () -> Result<()> {
let opts: cli::Opts = cli::Opts::parse();
let decorator = slog_term::TermDecorator::new().build();
let drain = slog_term::FullFormat::new(decorator).build().fuse();
let drain = slog_async::Async::new(drain).build().fuse();
let root_logger = slog::Logger::root(
drain,
o!("secret" => opts.secret.clone(),
"secret_key" => opts.secret_key.clone(),
"secret_namespace" => opts.secret_namespace.clone()),
);
let client = Client::try_default().await?;
info!(root_logger, "Loading configuration from Secret");
let secrets: Api<Secret> = Api::namespaced(client, opts.secret_namespace.as_str());
let secret = secrets.get(opts.secret.as_str()).await?;
let config_data = secret
.data
.as_ref()
.ok_or(anyhow!("Unable to get data from Secret"))?;
let config_content = config_data
.get(opts.secret_key.as_str())
.ok_or(anyhow!("Unable to get key from Secret"))?
.clone().0;
debug!(root_logger, "Configuration loaded from Secret");
let config: Vec<Arc<AresConfig>> =
serde_yaml::from_str::<Vec<_>>(std::str::from_utf8(&config_content[..])?)?
.into_iter()
.map(Arc::new)
.collect();
let records: Api<Record> = Api::all(Client::try_default().await?);
let record_list: Vec<Arc<Record>> = records.list(&ListParams::default()).await?
.items
.into_iter()
.map(Arc::new)
.collect();
let mut handles = vec![];
// TODO watch over config and reload when changes are made
for ares in config.into_iter() {
// Find all matching Records and put a ref of them into a Vec
let allowed_records: Vec<Arc<Record>> = record_list
.iter()
.filter(|record| ares.matches_selector(record.spec.fqdn.as_str()))
.map(|x| x.clone()) // clone() of Arc<> is intentional
.collect();
// TODO put a watcher over records instead of just getting them at program start
for mut record in allowed_records {
// Generate a proxy logger to be cloned so we can build upon it every loop
let proxy_logger = root_logger.new(o!());
let sub_ac = ares.clone(); // clone of Arc<> is intentional
handles.push(tokio::spawn(async move {
loop {
let sub_logger = proxy_logger.new(o!("record" => record.spec.fqdn.clone()));
if let Some(collector_obj) = &record.spec.value_from {
let collector = collector_obj.deref();
info!(sub_logger, "Getting zone domain name");
let zone = match sub_ac.provider.get_zone(&record.spec.fqdn).await {
Ok(z) => z,
Err(e) => {
crit!(sub_logger, "Error! {}", e);
break
}
};
let mut builder = RecordObject::builder(record.spec.fqdn.clone(),
zone, RecordType::A);
// Syncing should happen regardless of using a watcher to ensure that any
// extra records are deleted.
info!(sub_logger, "Syncing");
let sync_state = collector.sync(&record.metadata, &sub_ac.provider,
&mut builder).await;
if let Err(e) = sync_state {
crit!(sub_logger, "Error! {}", e);
break
}
info!(sub_logger, "Finished syncing");
info!(sub_logger, "Spawning watcher");
let res = collector.watch_values(&record.metadata, &sub_ac.provider,
&mut builder).await;
info!(sub_logger, "Stopped watching");
// Set a new record if the watcher stops; this could be the result of a
// timeout or a change in the Record value, which may need a refresh.
record = match res {
Ok(r) => Arc::new(r),
Err(e) => {
crit!(sub_logger, "Error! {}", e);
break
}
}
}
}
}));
}
}
let secret_logger = root_logger.new(o!());
handles.push(tokio::spawn(async move {
loop {
info!(secret_logger, "Watching over Secrets to detect configuration changes");
let mut secret_watcher = secrets
.watch(&ListParams::default(), "0")
.await
.unwrap()
.boxed();
while let Ok(Some(secret_status)) = secret_watcher.try_next().await {
// If the configuration changes, terminate the process so the supervisor can
// restart ARES with the new configuration.
match secret_status {
WatchEvent::Modified(modified) => {
if modified.metadata.uid == secret.metadata.uid {
info!(secret_logger, "Found config change, terminating");
std::process::exit(0);
}
},
WatchEvent:: | main | identifier_name |
main.rs | )]
#![recursion_limit="512"]
// starting doc {{{
//! ARES: Automatic REcord System.
//!
//! A Kubernetes-native system to automatically create and manage DNS records
//! meant to run in parallel with External DNS.
//!
//! Configuration is managed through the ares-secret Secret, typically in the
//! default namespace. This may change in the future to default to the
//! namespace that ARES is deployed in.
//!
//! ## Configuration
//!
//! A configuration file should look like this:
//!
//! ```yaml
//! - selector:
//! - syntixi.io
//! provider: cloudflare
//! providerOptions:
//! apiToken: ***
//! ```
//!
//! The corresponding Secret can look like:
//!
//! ```yaml
//! apiVersion: v1
//! kind: Secret
//! metadata:
//! name: ares-secret
//! stringData:
//! ares.yaml: |-
//! - selector:
//! - syntixi.io
//! provider: cloudflare
//! providerOptions:
//! apiToken: ***
//! ```
//!
//! If you want to control multiple domain zones across multiple different
//! providers, you can add another element into the default array and
//! configure another provider there. You can configure multiple domain zones
//! through a single provider.
//!
//! ## Custom Resource Definitions
//!
//! ARES watches over the syntixi.io/v1alpha1/Record CustomResourceDefinition
//! to know which domain names to add, remove, or modify. An example resource
//! is below.
//!
//! ```yaml
//! apiVersion: syntixi.io/v1alpha1
//! kind: Record
//! metadata:
//! name: example
//! spec:
//! fqdn: example.syntixi.io
//! ttl: 100
//! type: CNAME
//! value:
//! - syntixi.io
//! ```
//!
//! For addresses that can change, such as Nodes that Pods may be running on,
//! it is recommended to instead use a valueFrom selector, such as the
//! PodSelector. The example below includes a Pod and a Record that points to
//! the Node the Pod is running on, with a Selector similar to that in the
//! Kubernetes
//! [documentation](https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/).
//!
//! This should not be used for inbound traffic (for that, you should use a
//! LoadBalancer Service or an Ingress record, with external-dns). This is,
//! however, useful for making SPF records point to an outbound mail record,
//! where the mail can be sent from one of many Nodes.
//!
//! ```yaml
//! apiVersion: v1
//! kind: Pod
//! metadata:
//! name: nginx-hello-world
//! labels:
//! app: nginx
//! spec:
//! containers:
//! - name: nginx
//! image: nginxdemos/hello
//! ---
//! apiVersion: syntixi.io/v1alpha1
//! kind: Record
//! metadata:
//! name: example-selector
//! spec:
//! fqdn: selector.syntixi.io
//! ttl: 1
//! valueFrom:
//! podSelector:
//! matchLabels:
//! app: nginx
//! ```
//!
//! When a syntixi.io/v1alpha1/Record resource is created, an additional record
//! is made for ARES to track ownership over the DNS record. So long as that
//! tracking record exists, when the Kubernetes resource is deleted, the
//! corresponding record and tracking record will be deleted.
// }}}
// imports {{{
use clap::Clap;
use std::ops::Deref;
use std::sync::Arc;
use slog::{
crit, debug, error, info, log, o,
Drain, Logger,
};
use anyhow::{anyhow, Result};
use futures::{
StreamExt, TryStreamExt, select,
future::{Future, Fuse, join_all},
};
use k8s_openapi::api::core::v1::{Event, Secret};
use kube::{
api::{Api, ListParams, Meta, WatchEvent},
Client,
};
use kube_runtime::{utils::try_flatten_applied, watcher};
use kube_derive::{CustomResource};
mod cli;
mod xpathable;
mod providers;
mod program_config;
mod record_spec;
use program_config::AresConfig;
use providers::{ProviderConfig, util::{ProviderBackend, ZoneDomainName,
RecordType, Record as RecordObject}};
use record_spec::{Record, RecordValueCollector};
// }}}
#[tokio::main]
async fn main() -> Result<()> {
let opts: cli::Opts = cli::Opts::parse();
let decorator = slog_term::TermDecorator::new().build();
let drain = slog_term::FullFormat::new(decorator).build().fuse();
let drain = slog_async::Async::new(drain).build().fuse();
let root_logger = slog::Logger::root(
drain,
o!("secret" => opts.secret.clone(),
"secret_key" => opts.secret_key.clone(),
"secret_namespace" => opts.secret_namespace.clone()),
);
let client = Client::try_default().await?;
info!(root_logger, "Loading configuration from Secret");
let secrets: Api<Secret> = Api::namespaced(client, opts.secret_namespace.as_str());
let secret = secrets.get(opts.secret.as_str()).await?;
let config_data = secret
.data
.as_ref()
.ok_or(anyhow!("Unable to get data from Secret"))?;
let config_content = config_data
.get(opts.secret_key.as_str())
.ok_or(anyhow!("Unable to get key from Secret"))?
.clone().0;
debug!(root_logger, "Configuration loaded from Secret");
let config: Vec<Arc<AresConfig>> =
serde_yaml::from_str::<Vec<_>>(std::str::from_utf8(&config_content[..])?)?
.into_iter()
.map(Arc::new)
.collect();
let records: Api<Record> = Api::all(Client::try_default().await?);
let record_list: Vec<Arc<Record>> = records.list(&ListParams::default()).await?
.items
.into_iter()
.map(Arc::new)
.collect();
let mut handles = vec![];
// TODO watch over config and reload when changes are made
for ares in config.into_iter() {
// Find all matching Records and put a ref of them into a Vec
let allowed_records: Vec<Arc<Record>> = record_list
.iter()
.filter(|record| ares.matches_selector(record.spec.fqdn.as_str()))
.map(|x| x.clone()) // clone() of Arc<> is intentional
.collect();
// TODO put a watcher over records instead of just getting them at program start
for mut record in allowed_records {
// Generate a proxy logger to be cloned so we can build upon it every loop
let proxy_logger = root_logger.new(o!());
let sub_ac = ares.clone(); // clone of Arc<> is intentional
handles.push(tokio::spawn(async move {
loop {
let sub_logger = proxy_logger.new(o!("record" => record.spec.fqdn.clone()));
if let Some(collector_obj) = &record.spec.value_from {
let collector = collector_obj.deref();
info!(sub_logger, "Getting zone domain name");
let zone = match sub_ac.provider.get_zone(&record.spec.fqdn).await {
Ok(z) => z,
Err(e) => {
crit!(sub_logger, "Error! {}", e);
break
}
};
let mut builder = RecordObject::builder(record.spec.fqdn.clone(),
zone, RecordType::A);
// Syncing should happen regardless of using a watcher to ensure that any
// extra records are deleted.
info!(sub_logger, "Syncing");
let sync_state = collector.sync(&record.metadata, &sub_ac.provider,
&mut builder).await;
if let Err(e) = sync_state |
info!(sub_logger, "Finished syncing");
info!(sub_logger, "Spawning watcher");
let res = collector.watch_values(&record.metadata, &sub_ac.provider,
&mut builder).await;
info!(sub_logger, "Stopped watching");
// Set a new record if the watcher stops; this could be the result of a
// timeout or a change in the Record value, which may need a refresh.
record = match res {
Ok(r) => Arc::new(r),
Err(e) => {
crit!(sub_logger, "Error! {}", e);
break
}
}
}
}
}));
}
}
let secret_logger = root_logger.new(o!());
handles.push(tokio::spawn(async move {
loop {
info!(secret_logger, "Watching over Secrets to detect configuration changes");
let mut secret_watcher = secrets
.watch(&ListParams::default(), "0")
.await
.unwrap()
.boxed();
while let Ok(Some(secret_status)) = secret_watcher.try_next().await {
// If the configuration changes, terminate the process so the supervisor can
// restart ARES with the new configuration.
match secret_status {
WatchEvent::Modified(modified) => {
if modified.metadata.uid == secret.metadata.uid {
info!(secret_logger, "Found config change, terminating");
std::process::exit(0);
}
},
WatchEvent | {
crit!(sub_logger, "Error! {}", e);
break
} | conditional_block |
main.rs | _docs)]
#![recursion_limit="512"]
// starting doc {{{
//! ARES: Automatic REcord System.
//!
//! A Kubernetes-native system to automatically create and manage DNS records
//! meant to run in parallel with External DNS.
//!
//! Configuration is managed through the ares-secret Secret, typically in the
//! default namespace. This may change in the future to default to the
//! namespace that ARES is deployed in.
//!
//! ## Configuration
//!
//! A configuration file should look like this:
//!
//! ```yaml
//! - selector:
//! - syntixi.io
//! provider: cloudflare
//! providerOptions:
//! apiToken: ***
//! ```
//!
//! The corresponding Secret can look like:
//!
//! ```yaml
//! apiVersion: v1
//! kind: Secret
//! metadata:
//! name: ares-secret
//! stringData:
//! ares.yaml: |-
//! - selector:
//! - syntixi.io
//! provider: cloudflare
//! providerOptions:
//! apiToken: ***
//! ```
//!
//! If you want to control multiple domain zones across multiple different
//! providers, you can add another element into the default array and
//! configure another provider there. You can configure multiple domain zones
//! through a single provider.
//!
//! ## Custom Resource Definitions
//!
//! ARES watches over the syntixi.io/v1alpha1/Record CustomResourceDefinition
//! to know which domain names to add, remove, or modify. An example resource
//! is below.
//!
//! ```yaml
//! apiVersion: syntixi.io/v1alpha1
//! kind: Record
//! metadata:
//! name: example
//! spec:
//! fqdn: example.syntixi.io
//! ttl: 100
//! type: CNAME
//! value:
//! - syntixi.io
//! ```
//!
//! For addresses that can change, such as Nodes that Pods may be running on,
//! it is recommended to instead use a valueFrom selector, such as the
//! PodSelector. The example below includes a Pod and a Record that points to
//! the Node the Pod is running on, with a Selector similar to that in the
//! Kubernetes
//! [documentation](https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/).
//!
//! This should not be used for inbound traffic (for that, you should use a
//! LoadBalancer Service or an Ingress record, with external-dns). This is, | //! ```yaml
//! apiVersion: v1
//! kind: Pod
//! metadata:
//! name: nginx-hello-world
//! labels:
//! app: nginx
//! spec:
//! containers:
//! - name: nginx
//! image: nginxdemos/hello
//! ---
//! apiVersion: syntixi.io/v1alpha1
//! kind: Record
//! metadata:
//! name: example-selector
//! spec:
//! fqdn: selector.syntixi.io
//! ttl: 1
//! valueFrom:
//! podSelector:
//! matchLabels:
//! app: nginx
//! ```
//!
//! When a syntixi.io/v1alpha1/Record resource is created, an additional record
//! is made for ARES to track ownership over the DNS record. So long as that
//! tracking record exists, when the Kubernetes resource is deleted, the
//! corresponding record and tracking record will be deleted.
// }}}
// imports {{{
use clap::Clap;
use std::ops::Deref;
use std::sync::Arc;
use slog::{
crit, debug, error, info, log, o,
Drain, Logger,
};
use anyhow::{anyhow, Result};
use futures::{
StreamExt, TryStreamExt, select,
future::{Future, Fuse, join_all},
};
use k8s_openapi::api::core::v1::{Event, Secret};
use kube::{
api::{Api, ListParams, Meta, WatchEvent},
Client,
};
use kube_runtime::{utils::try_flatten_applied, watcher};
use kube_derive::{CustomResource};
mod cli;
mod xpathable;
mod providers;
mod program_config;
mod record_spec;
use program_config::AresConfig;
use providers::{ProviderConfig, util::{ProviderBackend, ZoneDomainName,
RecordType, Record as RecordObject}};
use record_spec::{Record, RecordValueCollector};
// }}}
#[tokio::main]
async fn main() -> Result<()> {
let opts: cli::Opts = cli::Opts::parse();
let decorator = slog_term::TermDecorator::new().build();
let drain = slog_term::FullFormat::new(decorator).build().fuse();
let drain = slog_async::Async::new(drain).build().fuse();
let root_logger = slog::Logger::root(
drain,
o!("secret" => opts.secret.clone(),
"secret_key" => opts.secret_key.clone(),
"secret_namespace" => opts.secret_namespace.clone()),
);
let client = Client::try_default().await?;
info!(root_logger, "Loading configuration from Secret");
let secrets: Api<Secret> = Api::namespaced(client, opts.secret_namespace.as_str());
let secret = secrets.get(opts.secret.as_str()).await?;
let config_data = secret
.data
.as_ref()
.ok_or(anyhow!("Unable to get data from Secret"))?;
let config_content = config_data
.get(opts.secret_key.as_str())
.ok_or(anyhow!("Unable to get key from Secret"))?
.clone().0;
debug!(root_logger, "Configuration loaded from Secret");
let config: Vec<Arc<AresConfig>> =
serde_yaml::from_str::<Vec<_>>(std::str::from_utf8(&config_content[..])?)?
.into_iter()
.map(Arc::new)
.collect();
let records: Api<Record> = Api::all(Client::try_default().await?);
let record_list: Vec<Arc<Record>> = records.list(&ListParams::default()).await?
.items
.into_iter()
.map(Arc::new)
.collect();
let mut handles = vec![];
// TODO watch over config and reload when changes are made
for ares in config.into_iter() {
// Find all matching Records and put a ref of them into a Vec
let allowed_records: Vec<Arc<Record>> = record_list
.iter()
.filter(|record| ares.matches_selector(record.spec.fqdn.as_str()))
.map(|x| x.clone()) // clone() of Arc<> is intentional
.collect();
// TODO put a watcher over records instead of just getting them at program start
for mut record in allowed_records {
// Generate a proxy logger to be cloned so we can build upon it every loop
let proxy_logger = root_logger.new(o!());
let sub_ac = ares.clone(); // clone of Arc<> is intentional
handles.push(tokio::spawn(async move {
loop {
let sub_logger = proxy_logger.new(o!("record" => record.spec.fqdn.clone()));
if let Some(collector_obj) = &record.spec.value_from {
let collector = collector_obj.deref();
info!(sub_logger, "Getting zone domain name");
let zone = match sub_ac.provider.get_zone(&record.spec.fqdn).await {
Ok(z) => z,
Err(e) => {
crit!(sub_logger, "Error! {}", e);
break
}
};
let mut builder = RecordObject::builder(record.spec.fqdn.clone(),
zone, RecordType::A);
// Syncing should happen regardless of using a watcher to ensure that any
// extra records are deleted.
info!(sub_logger, "Syncing");
let sync_state = collector.sync(&record.metadata, &sub_ac.provider,
&mut builder).await;
if let Err(e) = sync_state {
crit!(sub_logger, "Error! {}", e);
break
}
info!(sub_logger, "Finished syncing");
info!(sub_logger, "Spawning watcher");
let res = collector.watch_values(&record.metadata, &sub_ac.provider,
&mut builder).await;
info!(sub_logger, "Stopped watching");
// Set a new record if the watcher stops; this could be the result of a
// timeout or a change in the Record value, which may need a refresh.
record = match res {
Ok(r) => Arc::new(r),
Err(e) => {
crit!(sub_logger, "Error! {}", e);
break
}
}
}
}
}));
}
}
let secret_logger = root_logger.new(o!());
handles.push(tokio::spawn(async move {
loop {
info!(secret_logger, "Watching over Secrets to detect configuration changes");
let mut secret_watcher = secrets
.watch(&ListParams::default(), "0")
.await
.unwrap()
.boxed();
while let Ok(Some(secret_status)) = secret_watcher.try_next().await {
// If the configuration changes, terminate the process so the supervisor can
// restart ARES with the new configuration.
match secret_status {
WatchEvent::Modified(modified) => {
if modified.metadata.uid == secret.metadata.uid {
info!(secret_logger, "Found config change, terminating");
std::process::exit(0);
}
},
WatchEvent:: | //! however, useful for making SPF records point to an outbound mail record,
//! where the mail can be sent from one of many Nodes.
//! | random_line_split |
main.rs | )]
#![recursion_limit="512"]
// starting doc {{{
//! ARES: Automatic REcord System.
//!
//! A Kubernetes-native system to automatically create and manage DNS records
//! meant to run in parallel with External DNS.
//!
//! Configuration is managed through the ares-secret Secret, typically in the
//! default namespace. This may change in the future to default to the
//! namespace that ARES is deployed in.
//!
//! ## Configuration
//!
//! A configuration file should look like this:
//!
//! ```yaml
//! - selector:
//! - syntixi.io
//! provider: cloudflare
//! providerOptions:
//! apiToken: ***
//! ```
//!
//! The corresponding Secret can look like:
//!
//! ```yaml
//! apiVersion: v1
//! kind: Secret
//! metadata:
//! name: ares-secret
//! stringData:
//! ares.yaml: |-
//! - selector:
//! - syntixi.io
//! provider: cloudflare
//! providerOptions:
//! apiToken: ***
//! ```
//!
//! If you want to control multiple domain zones across multiple different
//! providers, you can add another element into the default array and
//! configure another provider there. You can configure multiple domain zones
//! through a single provider.
//!
//! ## Custom Resource Definitions
//!
//! ARES watches over the syntixi.io/v1alpha1/Record CustomResourceDefinition
//! to know which domain names to add, remove, or modify. An example resource
//! is below.
//!
//! ```yaml
//! apiVersion: syntixi.io/v1alpha1
//! kind: Record
//! metadata:
//! name: example
//! spec:
//! fqdn: example.syntixi.io
//! ttl: 100
//! type: CNAME
//! value:
//! - syntixi.io
//! ```
//!
//! For addresses that can change, such as Nodes that Pods may be running on,
//! it is recommended to instead use a valueFrom selector, such as the
//! PodSelector. The example below includes a Pod and a Record that points to
//! the Node the Pod is running on, with a Selector similar to that in the
//! Kubernetes
//! [documentation](https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/).
//!
//! This should not be used for inbound traffic (for that, you should use a
//! LoadBalancer Service or an Ingress record, with external-dns). This is,
//! however, useful for making SPF records point to an outbound mail record,
//! where the mail can be sent from one of many Nodes.
//!
//! ```yaml
//! apiVersion: v1
//! kind: Pod
//! metadata:
//! name: nginx-hello-world
//! labels:
//! app: nginx
//! spec:
//! containers:
//! - name: nginx
//! image: nginxdemos/hello
//! ---
//! apiVersion: syntixi.io/v1alpha1
//! kind: Record
//! metadata:
//! name: example-selector
//! spec:
//! fqdn: selector.syntixi.io
//! ttl: 1
//! valueFrom:
//! podSelector:
//! matchLabels:
//! app: nginx
//! ```
//!
//! When a syntixi.io/v1alpha1/Record resource is created, an additional record
//! is made for ARES to track ownership over the DNS record. So long as that
//! tracking record exists, when the Kubernetes resource is deleted, the
//! corresponding record and tracking record will be deleted.
// }}}
// imports {{{
use clap::Clap;
use std::ops::Deref;
use std::sync::Arc;
use slog::{
crit, debug, error, info, log, o,
Drain, Logger,
};
use anyhow::{anyhow, Result};
use futures::{
StreamExt, TryStreamExt, select,
future::{Future, Fuse, join_all},
};
use k8s_openapi::api::core::v1::{Event, Secret};
use kube::{
api::{Api, ListParams, Meta, WatchEvent},
Client,
};
use kube_runtime::{utils::try_flatten_applied, watcher};
use kube_derive::{CustomResource};
mod cli;
mod xpathable;
mod providers;
mod program_config;
mod record_spec;
use program_config::AresConfig;
use providers::{ProviderConfig, util::{ProviderBackend, ZoneDomainName,
RecordType, Record as RecordObject}};
use record_spec::{Record, RecordValueCollector};
// }}}
#[tokio::main]
async fn main() -> Result<()> | let config_content = config_data
.get(opts.secret_key.as_str())
.ok_or(anyhow!("Unable to get key from Secret"))?
.clone().0;
debug!(root_logger, "Configuration loaded from Secret");
let config: Vec<Arc<AresConfig>> =
serde_yaml::from_str::<Vec<_>>(std::str::from_utf8(&config_content[..])?)?
.into_iter()
.map(Arc::new)
.collect();
let records: Api<Record> = Api::all(Client::try_default().await?);
let record_list: Vec<Arc<Record>> = records.list(&ListParams::default()).await?
.items
.into_iter()
.map(Arc::new)
.collect();
let mut handles = vec![];
// TODO watch over config and reload when changes are made
for ares in config.into_iter() {
// Find all matching Records and put a ref of them into a Vec
let allowed_records: Vec<Arc<Record>> = record_list
.iter()
.filter(|record| ares.matches_selector(record.spec.fqdn.as_str()))
.map(|x| x.clone()) // clone() of Arc<> is intentional
.collect();
// TODO put a watcher over records instead of just getting them at program start
for mut record in allowed_records {
// Generate a proxy logger to be cloned so we can build upon it every loop
let proxy_logger = root_logger.new(o!());
let sub_ac = ares.clone(); // clone of Arc<> is intentional
handles.push(tokio::spawn(async move {
loop {
let sub_logger = proxy_logger.new(o!("record" => record.spec.fqdn.clone()));
if let Some(collector_obj) = &record.spec.value_from {
let collector = collector_obj.deref();
info!(sub_logger, "Getting zone domain name");
let zone = match sub_ac.provider.get_zone(&record.spec.fqdn).await {
Ok(z) => z,
Err(e) => {
crit!(sub_logger, "Error! {}", e);
break
}
};
let mut builder = RecordObject::builder(record.spec.fqdn.clone(),
zone, RecordType::A);
// Syncing should happen regardless of using a watcher to ensure that any
// extra records are deleted.
info!(sub_logger, "Syncing");
let sync_state = collector.sync(&record.metadata, &sub_ac.provider,
&mut builder).await;
if let Err(e) = sync_state {
crit!(sub_logger, "Error! {}", e);
break
}
info!(sub_logger, "Finished syncing");
info!(sub_logger, "Spawning watcher");
let res = collector.watch_values(&record.metadata, &sub_ac.provider,
&mut builder).await;
info!(sub_logger, "Stopped watching");
// Set a new record if the watcher stops; this could be the result of a
// timeout or a change in the Record value, which may need a refresh.
record = match res {
Ok(r) => Arc::new(r),
Err(e) => {
crit!(sub_logger, "Error! {}", e);
break
}
}
}
}
}));
}
}
let secret_logger = root_logger.new(o!());
handles.push(tokio::spawn(async move {
loop {
info!(secret_logger, "Watching over Secrets to detect configuration changes");
let mut secret_watcher = secrets
.watch(&ListParams::default(), "0")
.await
.unwrap()
.boxed();
while let Ok(Some(secret_status)) = secret_watcher.try_next().await {
// If the configuration changes, terminate the process so the supervisor can
// restart ARES with the new configuration.
match secret_status {
WatchEvent::Modified(modified) => {
if modified.metadata.uid == secret.metadata.uid {
info!(secret_logger, "Found config change, terminating");
std::process::exit(0);
}
},
WatchEvent | {
let opts: cli::Opts = cli::Opts::parse();
let decorator = slog_term::TermDecorator::new().build();
let drain = slog_term::FullFormat::new(decorator).build().fuse();
let drain = slog_async::Async::new(drain).build().fuse();
let root_logger = slog::Logger::root(
drain,
o!("secret" => opts.secret.clone(),
"secret_key" => opts.secret_key.clone(),
"secret_namespace" => opts.secret_namespace.clone()),
);
let client = Client::try_default().await?;
info!(root_logger, "Loading configuration from Secret");
let secrets: Api<Secret> = Api::namespaced(client, opts.secret_namespace.as_str());
let secret = secrets.get(opts.secret.as_str()).await?;
let config_data = secret
.data
.as_ref()
.ok_or(anyhow!("Unable to get data from Secret"))?; | identifier_body |
core.rs | ) -> std::fmt::Result {
write!(f, "{}, {}", self.inner, self.token)
}
}
macro_rules! token_predicate {
($token:expr) => ($token);
($token1:expr, $token2:expr $(, $tokens:expr)*) => (
TokenPredicate::new(token_predicate!($token2 $(, $tokens)*), $token1)
);
}
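// For example, `token_predicate!(Comma, Semicolon, Eof)` expands to
// `TokenPredicate::new(TokenPredicate::new(Eof, Semicolon), Comma)`, folding
// the argument list into nested predicates (`Comma` and `Semicolon` are
// illustrative token names; `Eof` is used elsewhere in this module).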
/// Skip tokens until one of the terminators in `term` appears at the current
/// nesting level, consuming the terminator as well if `eat_term` is set.
/// Delimiters are tracked on a stack so that terminators nested inside
/// balanced delimiters are ignored; mismatched or unexpected closing
/// delimiters abort with a fatal diagnostic.
pub fn recover<P: Parser>(p: &mut P, term: &[Token], eat_term: bool) {
let mut stack = Vec::new();
loop {
let Spanned {
value: tkn,
span: sp,
} = p.peek(0);
if stack.is_empty() {
for t in term {
if *t == tkn {
if eat_term {
p.skip();
}
return;
}
}
}
// p.emit(
// DiagBuilder2::note("Skipped during recovery")
// .span(sp)
// );
match tkn {
OpenDelim(x) => stack.push(x),
CloseDelim(x) => {
if let Some(open) = stack.pop() {
if open != x {
p.emit(
DiagBuilder2::fatal(format!(
"Found closing {} which is not the complement to the previous \
opening {}",
CloseDelim(x),
OpenDelim(open)
))
.span(sp),
);
break;
}
} else {
p.emit(
DiagBuilder2::fatal(format!(
"Found closing {} without an earlier opening {}",
CloseDelim(x),
OpenDelim(x)
))
.span(sp),
);
break;
}
}
Eof => break,
_ => (),
}
p.skip();
}
}
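// Usage sketch (illustrative): after a statement fails to parse, skip to the
// next semicolon at the current nesting level and consume it, ignoring any
// semicolons nested inside balanced delimiters (`Semicolon` stands in for
// this crate's actual token variant):
//
//     recover(p, &[Semicolon], true);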
/// Apply a parser and if it fails, recover to one of a list of tokens. This
/// turns reported into recovered errors.
pub fn recovered<P: Parser, R, F>(
p: &mut P,
term: &[Token],
eat_term: bool,
mut parse: F,
) -> RecoveredResult<R>
where
F: FnMut(&mut P) -> Result<R, Reported>,
{
match parse(p) {
Ok(x) => Ok(x),
Err(Reported) => {
recover(p, term, eat_term);
Err(Recovered)
}
}
}
/// Consume a token if it is present, do nothing otherwise.
pub fn accept<P: Parser>(p: &mut P, expect: Token) -> bool {
if p.peek(0).value == expect {
p.bump();
true
} else {
false
}
}
/// Consume a specific token, or emit an error if the next token in the stream
/// does not match the expected one.
pub fn require<P: Parser>(p: &mut P, expect: Token) -> ReportedResult<()> {
let Spanned {
value: actual,
span,
} = p.peek(0);
if actual == expect {
p.bump();
Ok(())
} else {
p.emit(
DiagBuilder2::error(format!("Expected {}, but found {} instead", expect, actual))
.span(span),
);
Err(Reported)
}
}
/// Repeatedly apply a parser until it returns `None`.
pub fn repeat<P: Parser, R, F, E>(p: &mut P, mut parse: F) -> Result<Vec<R>, E>
where
F: FnMut(&mut P) -> Result<Option<R>, E>,
{
let mut v = Vec::new();
while p.peek(0).value != Eof {
match parse(p)? {
Some(x) => v.push(x),
None => break,
}
}
Ok(v)
}
/// Repeatedly apply a parser until a certain predicate matches.
pub fn repeat_until<P: Parser, R, F, T>(
p: &mut P,
mut term: T,
mut parse: F,
) -> Result<Vec<R>, Recovered>
where
F: FnMut(&mut P) -> Result<R, Reported>,
T: Predicate<P>,
{
let mut v = Vec::new();
while p.peek(0).value != Eof && !term.matches(p) {
match parse(p) {
Ok(x) => v.push(x),
Err(_) => {
term.recover(p, false);
return Err(Recovered);
}
}
}
Ok(v)
}
/// Parse a list of items separated with a specific token, until a terminator
/// token has been reached. The terminator is not consumed.
pub fn separated<P: Parser, M, R, F, T>(
p: &mut P,
sep: Token,
mut term: T,
msg: &M,
mut parse: F,
) -> RecoveredResult<Vec<R>>
where
F: FnMut(&mut P) -> ReportedResult<R>,
T: Predicate<P>,
M: Display + ?Sized,
{
let mut v = Vec::new();
while !p.is_fatal() && p.peek(0).value != Eof && !term.matches(p) {
// Parse the item. If the parser fails, recover to the terminator and
// abort.
match parse(p) {
Ok(x) => v.push(x),
Err(_) => {
term.recover(p, false);
return Err(Recovered);
}
}
// Try to match the terminator. If it does not, consume a separator and
// catch the case where the separator is immediately followed by the
// terminator (superfluous separator).
if term.matches(p) {
break;
} else if accept(p, sep) {
if term.matches(p) {
let q = p.last_span();
p.emit(DiagBuilder2::warning(format!("Superfluous trailing {}", sep)).span(q));
break;
}
} else {
let Spanned { value: tkn, span } = p.peek(0);
p.emit(
DiagBuilder2::error(format!(
"Expected {} or {} after {}, but found {} instead",
term, sep, msg, tkn
))
.span(span),
);
term.recover(p, false);
return Err(Recovered);
}
}
Ok(v)
}
/// Parse a non-empty list. See `separated` for details.
pub fn separated_nonempty<P: Parser, M, R, F, T>(
p: &mut P,
sep: Token,
term: T,
msg: &M,
parse: F,
) -> RecoveredResult<Vec<R>>
where
F: FnMut(&mut P) -> ReportedResult<R>,
T: Predicate<P>,
M: Display + ?Sized,
{
let q = p.peek(0).span;
let v = separated(p, sep, term, msg, parse)?;
if v.is_empty() {
p.emit(DiagBuilder2::error(format!("Expected at least one {}", msg)).span(q));
Err(Recovered)
} else {
Ok(v)
}
}
/// Parses the opening delimiter, calls the `inner` function, and parses the
/// closing delimiter. Properly recovers to and including the closing
/// delimiter if the `inner` function throws an error.
pub fn flanked<P: Parser, R, F>(p: &mut P, delim: DelimToken, mut inner: F) -> RecoveredResult<R>
where
F: FnMut(&mut P) -> ReportedResult<R>,
{
require(p, OpenDelim(delim)).map_err(|_| Recovered)?;
match inner(p) {
Ok(r) => match require(p, CloseDelim(delim)) {
Ok(_) => Ok(r),
Err(Reported) => {
recover(p, &[CloseDelim(delim)], true);
Err(Recovered)
}
},
Err(Reported) => {
recover(p, &[CloseDelim(delim)], true);
Err(Recovered)
}
}
}
/// If the opening delimiter is present, consumes it, calls the `inner`
/// function, and parses the closing delimiter. Properly recovers to and
/// including the closing delimiter if the `inner` function throws an error.
/// If the opening delimiter is not present, returns `None`.
pub fn try_flanked<P: Parser, R, F>(
p: &mut P,
delim: DelimToken,
inner: F,
) -> RecoveredResult<Option<R>>
where
F: FnMut(&mut P) -> ReportedResult<R>,
{
if p.peek(0).value == OpenDelim(delim) {
flanked(p, delim, inner).map(|r| Some(r))
} else {
Ok(None)
}
}
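// Illustrative sketch (not part of the original source): `try_flanked` makes
// an entire delimited group optional. `Paren` and `parse_expr` are assumed
// placeholders.
//
// // Parses `(expr)` if an opening paren is present, otherwise yields None.
// let maybe_expr = try_flanked(p, Paren, |p| parse_expr(p))?;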
/// Parse an identifier.
pub fn parse_ident<P: Parser, M: Display>(p: &mut P, msg: M) -> ReportedResult<Spanned<Name>> {
let Spanned { value, span } = p.peek(0);
match value {
Ident(n) => {
p.bump();
Ok(Spanned::new(n, span))
}
wrong => {
p.emit(
DiagBuilder2::error(format!("Expected {}, but found {} instead", msg, wrong))
.span(span),
);
Err(Reported)
}
}
}
/// Try to parse an identifier.
pub fn try_ident<P: Parser>(p: &mut P) -> Option<Spanned<Name>> {
    // Body reconstructed from the truncated dump; it mirrors `parse_ident`
    // above, but returns None instead of emitting a diagnostic.
    let Spanned { value, span } = p.peek(0);
    match value {
        Ident(n) => {
            p.bump();
            Some(Spanned::new(n, span))
        }
        _ => None,
    }
}
|
core.rs | pub struct FuncPredicate<P: Parser, M: FnMut(&mut P) -> bool, R: FnMut(&mut P, bool)> {
match_func: M,
recover_func: R,
desc: &'static str,
_marker: PhantomData<P>,
}
impl<P, M, R> Predicate<P> for FuncPredicate<P, M, R>
where
P: Parser,
M: FnMut(&mut P) -> bool,
R: FnMut(&mut P, bool),
{
fn matches(&mut self, p: &mut P) -> bool {
(self.match_func)(p)
}
fn recover(&mut self, p: &mut P, consume: bool) {
(self.recover_func)(p, consume)
}
}
impl<P, M, R> Display for FuncPredicate<P, M, R>
where
P: Parser,
M: FnMut(&mut P) -> bool,
R: FnMut(&mut P, bool),
{
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "{}", self.desc)
}
}
pub struct TokenPredicate<P: Parser, T: Predicate<P>> {
inner: T,
token: Token,
_marker: PhantomData<P>,
}
impl<P, T> TokenPredicate<P, T>
where
P: Parser,
T: Predicate<P>,
{
/// Create a new token predicate.
pub fn new(inner: T, token: Token) -> TokenPredicate<P, T> {
TokenPredicate {
inner: inner,
token: token,
_marker: PhantomData,
}
}
}
impl<P, T> Predicate<P> for TokenPredicate<P, T>
where
P: Parser,
T: Predicate<P>,
{
fn matches(&mut self, p: &mut P) -> bool {
self.inner.matches(p) || p.peek(0).value == self.token
}
fn recover(&mut self, p: &mut P, consume: bool) {
self.inner.recover(p, consume)
}
}
impl<P, T> Display for TokenPredicate<P, T>
where
P: Parser,
T: Predicate<P>,
{
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "{}, {}", self.inner, self.token)
}
}
macro_rules! token_predicate {
($token:expr) => ($token);
($token1:expr, $token2:expr $(, $tokens:expr)*) => (
TokenPredicate::new(token_predicate!($token2 $(, $tokens)*), $token1)
);
}
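// Illustrative sketch (not part of the original source): the macro folds its
// arguments right-to-left into nested `TokenPredicate`s, so
// `token_predicate!(Semicolon, Comma)` expands to roughly
// `TokenPredicate::new(Comma, Semicolon)`. `Semicolon` and `Comma` are
// assumed token names, and a `Predicate` impl for bare `Token`s (not shown
// in this excerpt) is required for the innermost argument.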
/// Skip tokens until one of the tokens in `term` is found outside any nested
/// delimiters. Mismatched or unopened closing delimiters emit a fatal
/// diagnostic and abort the recovery. If `eat_term` is true, the matching
/// terminator itself is consumed as well.
pub fn recover<P: Parser>(p: &mut P, term: &[Token], eat_term: bool) {
let mut stack = Vec::new();
loop {
let Spanned {
value: tkn,
span: sp,
} = p.peek(0);
if stack.is_empty() {
for t in term {
if *t == tkn {
if eat_term {
p.skip();
}
return;
}
}
}
// p.emit(
// DiagBuilder2::note("Skipped during recovery")
// .span(sp)
// );
match tkn {
OpenDelim(x) => stack.push(x),
CloseDelim(x) => {
if let Some(open) = stack.pop() {
if open != x {
p.emit(
DiagBuilder2::fatal(format!(
"Found closing {} which is not the complement to the previous \
opening {}",
CloseDelim(x),
OpenDelim(open)
))
.span(sp),
);
break;
}
} else {
p.emit(
DiagBuilder2::fatal(format!(
"Found closing {} without an earlier opening {}",
CloseDelim(x),
OpenDelim(x)
))
.span(sp),
);
break;
}
}
Eof => break,
_ => (),
}
p.skip();
}
}
/// Apply a parser and if it fails, recover to one of a list of tokens. This
/// turns reported into recovered errors.
pub fn recovered<P: Parser, R, F>(
p: &mut P,
term: &[Token],
eat_term: bool,
mut parse: F,
) -> RecoveredResult<R>
where
F: FnMut(&mut P) -> Result<R, Reported>,
{
match parse(p) {
Ok(x) => Ok(x),
Err(Reported) => {
recover(p, term, eat_term);
Err(Recovered)
}
}
}
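// Illustrative sketch (not part of the original source): wrapping a parse
// function with `recovered` so a reported error skips ahead to the next
// semicolon instead of aborting the caller. `Semicolon` and `parse_stmt`
// are assumed placeholders.
//
// let stmt = recovered(p, &[Semicolon], true, |p| parse_stmt(p))?;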
/// Consume a token if it is present, do nothing otherwise.
pub fn accept<P: Parser>(p: &mut P, expect: Token) -> bool {
if p.peek(0).value == expect {
p.bump();
true
} else {
false
}
}
/// Consume a specific token, or emit an error if the next token in the stream
/// does not match the expected one.
pub fn require<P: Parser>(p: &mut P, expect: Token) -> ReportedResult<()> {
let Spanned {
value: actual,
span,
} = p.peek(0);
if actual == expect {
p.bump();
Ok(())
} else {
p.emit(
DiagBuilder2::error(format!("Expected {}, but found {} instead", expect, actual))
.span(span),
);
Err(Reported)
}
}
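// Illustrative sketch (not part of the original source): `accept` is for
// optional tokens, `require` for mandatory ones. `Comma` and `Semicolon`
// are assumed token names.
//
// let _had_trailing_comma = accept(p, Comma); // fine either way
// require(p, Semicolon)?;                     // diagnoses a missing `;`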
/// Repeatedly apply a parser until it returns `None`.
pub fn repeat<P: Parser, R, F, E>(p: &mut P, mut parse: F) -> Result<Vec<R>, E>
where
F: FnMut(&mut P) -> Result<Option<R>, E>,
{
let mut v = Vec::new();
while p.peek(0).value != Eof {
match parse(p)? {
Some(x) => v.push(x),
None => break,
}
}
Ok(v)
}
/// Repeatedly apply a parser until a certain predicate matches.
pub fn repeat_until<P: Parser, R, F, T>(
p: &mut P,
mut term: T,
mut parse: F,
) -> Result<Vec<R>, Recovered>
where
F: FnMut(&mut P) -> Result<R, Reported>,
T: Predicate<P>,
{
let mut v = Vec::new();
    while p.peek(0).value != Eof && !term.matches(p) {
        match parse(p) {
            Ok(x) => v.push(x),
Err(_) => {
term.recover(p, false);
return Err(Recovered);
}
}
}
Ok(v)
}
/// Parse a list of items separated with a specific token, until a terminator
/// token has been reached. The terminator is not consumed.
pub fn separated<P: Parser, M, R, F, T>(
p: &mut P,
sep: Token,
mut term: T,
msg: &M,
mut parse: F,
) -> RecoveredResult<Vec<R>>
where
F: FnMut(&mut P) -> ReportedResult<R>,
T: Predicate<P>,
M: Display + ?Sized,
{
let mut v = Vec::new();
while !p.is_fatal() && p.peek(0).value != Eof && !term.matches(p) {
// Parse the item. If the parser fails, recover to the terminator and
// abort.
match parse(p) {
Ok(x) => v.push(x),
Err(_) => {
term.recover(p, false);
return Err(Recovered);
}
}
// Try to match the terminator. If it does not, consume a separator and
// catch the case where the separator is immediately followed by the
// terminator (superfluous separator).
if term.matches(p) {
break;
} else if accept(p, sep) {
if term.matches(p) {
let q = p.last_span();
p.emit(DiagBuilder2::warning(format!("Superfluous trailing {}", sep)).span(q));
break;
}
} else {
let Spanned { value: tkn, span } = p.peek(0);
p.emit(
DiagBuilder2::error(format!(
"Expected {} or {} after {}, but found {} instead",
term, sep, msg, tkn
))
.span(span),
);
term.recover(p, false);
return Err(Recovered);
}
}
Ok(v)
}
/// Parse a non-empty list. See `separated` for details.
pub fn separated_nonempty<P: Parser, M, R, F, T>(
p: &mut P,
sep: Token,
term: T,
msg: &M,
parse: F,
) -> RecoveredResult<Vec<R>>
where
F: FnMut(&mut P) -> ReportedResult<R>,
T: Predicate<P>,
M: Display + ?Sized,
{
let q = p.peek(0).span;
let v = separated(p, sep, term, msg, parse)?;
if v.is_empty() {
p.emit(DiagBuilder2::error(format!("Expected at least one {}", msg)).span(q));
Err(Recovered)
} else {
Ok(v)
}
}
core.rs | pub struct FuncPredicate<P: Parser, M: FnMut(&mut P) -> bool, R: FnMut(&mut P, bool)> {
match_func: M,
recover_func: R,
desc: &'static str,
_marker: PhantomData<P>,
}
impl<P, M, R> Predicate<P> for FuncPredicate<P, M, R>
where
P: Parser,
M: FnMut(&mut P) -> bool,
R: FnMut(&mut P, bool),
{
fn matches(&mut self, p: &mut P) -> bool {
(self.match_func)(p)
}
fn recover(&mut self, p: &mut P, consume: bool) {
(self.recover_func)(p, consume)
}
}
impl<P, M, R> Display for FuncPredicate<P, M, R>
where
P: Parser,
M: FnMut(&mut P) -> bool,
R: FnMut(&mut P, bool),
{
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "{}", self.desc)
}
}
pub struct TokenPredicate<P: Parser, T: Predicate<P>> {
inner: T,
token: Token,
_marker: PhantomData<P>,
}
impl<P, T> TokenPredicate<P, T>
where
P: Parser,
T: Predicate<P>,
{
/// Create a new token predicate.
    pub fn new(inner: T, token: Token) -> TokenPredicate<P, T> {
        TokenPredicate {
            inner: inner,
            token: token,
            _marker: PhantomData,
        }
    }
}
impl<P, T> Predicate<P> for TokenPredicate<P, T>
where
P: Parser,
T: Predicate<P>,
{
fn matches(&mut self, p: &mut P) -> bool {
self.inner.matches(p) || p.peek(0).value == self.token
}
fn recover(&mut self, p: &mut P, consume: bool) {
self.inner.recover(p, consume)
}
}
impl<P, T> Display for TokenPredicate<P, T>
where
P: Parser,
T: Predicate<P>,
{
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "{}, {}", self.inner, self.token)
}
}
macro_rules! token_predicate {
($token:expr) => ($token);
($token1:expr, $token2:expr $(, $tokens:expr)*) => (
TokenPredicate::new(token_predicate!($token2 $(, $tokens)*), $token1)
);
}
/// Skip tokens until one of the tokens in `term` is found outside any nested
/// delimiters. Mismatched or unopened closing delimiters emit a fatal
/// diagnostic and abort the recovery. If `eat_term` is true, the matching
/// terminator itself is consumed as well.
pub fn recover<P: Parser>(p: &mut P, term: &[Token], eat_term: bool) {
let mut stack = Vec::new();
loop {
let Spanned {
value: tkn,
span: sp,
} = p.peek(0);
if stack.is_empty() {
for t in term {
if *t == tkn {
if eat_term {
p.skip();
}
return;
}
}
}
// p.emit(
// DiagBuilder2::note("Skipped during recovery")
// .span(sp)
// );
match tkn {
OpenDelim(x) => stack.push(x),
CloseDelim(x) => {
if let Some(open) = stack.pop() {
if open != x {
p.emit(
DiagBuilder2::fatal(format!(
"Found closing {} which is not the complement to the previous \
opening {}",
CloseDelim(x),
OpenDelim(open)
))
.span(sp),
);
break;
}
} else {
p.emit(
DiagBuilder2::fatal(format!(
"Found closing {} without an earlier opening {}",
CloseDelim(x),
OpenDelim(x)
))
.span(sp),
);
break;
}
}
Eof => break,
_ => (),
}
p.skip();
}
}
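// Worked example (not from the original source): with term = [Semicolon],
// recovering inside `foo ( bar ; baz ) ; qux` skips the whole parenthesized
// group (the `;` inside it sits below the top level of the delimiter stack)
// and stops at the outer `;` before `qux`, consuming it only when `eat_term`
// is true.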
/// Apply a parser and if it fails, recover to one of a list of tokens. This
/// turns reported into recovered errors.
pub fn recovered<P: Parser, R, F>(
p: &mut P,
term: &[Token],
eat_term: bool,
mut parse: F,
) -> RecoveredResult<R>
where
F: FnMut(&mut P) -> Result<R, Reported>,
{
match parse(p) {
Ok(x) => Ok(x),
Err(Reported) => {
recover(p, term, eat_term);
Err(Recovered)
}
}
}
/// Consume a token if it is present, do nothing otherwise.
pub fn accept<P: Parser>(p: &mut P, expect: Token) -> bool {
if p.peek(0).value == expect {
p.bump();
true
} else {
false
}
}
/// Consume a specific token, or emit an error if the next token in the stream
/// does not match the expected one.
pub fn require<P: Parser>(p: &mut P, expect: Token) -> ReportedResult<()> {
let Spanned {
value: actual,
span,
} = p.peek(0);
if actual == expect {
p.bump();
Ok(())
} else {
p.emit(
DiagBuilder2::error(format!("Expected {}, but found {} instead", expect, actual))
.span(span),
);
Err(Reported)
}
}
/// Repeatedly apply a parser until it returns `None`.
pub fn repeat<P: Parser, R, F, E>(p: &mut P, mut parse: F) -> Result<Vec<R>, E>
where
F: FnMut(&mut P) -> Result<Option<R>, E>,
{
let mut v = Vec::new();
while p.peek(0).value != Eof {
match parse(p)? {
Some(x) => v.push(x),
None => break,
}
}
Ok(v)
}
/// Repeatedly apply a parser until a certain predicate matches.
pub fn repeat_until<P: Parser, R, F, T>(
p: &mut P,
mut term: T,
mut parse: F,
) -> Result<Vec<R>, Recovered>
where
F: FnMut(&mut P) -> Result<R, Reported>,
T: Predicate<P>,
{
let mut v = Vec::new();
while p.peek(0).value != Eof && !term.matches(p) {
match parse(p) {
Ok(x) => v.push(x),
Err(_) => {
term.recover(p, false);
return Err(Recovered);
}
}
}
Ok(v)
}
/// Parse a list of items separated with a specific token, until a terminator
/// token has been reached. The terminator is not consumed.
pub fn separated<P: Parser, M, R, F, T>(
p: &mut P,
sep: Token,
mut term: T,
msg: &M,
mut parse: F,
) -> RecoveredResult<Vec<R>>
where
F: FnMut(&mut P) -> ReportedResult<R>,
T: Predicate<P>,
M: Display + ?Sized,
{
let mut v = Vec::new();
while !p.is_fatal() && p.peek(0).value != Eof && !term.matches(p) {
// Parse the item. If the parser fails, recover to the terminator and
// abort.
match parse(p) {
Ok(x) => v.push(x),
Err(_) => {
term.recover(p, false);
return Err(Recovered);
}
}
// Try to match the terminator. If it does not, consume a separator and
// catch the case where the separator is immediately followed by the
// terminator (superfluous separator).
if term.matches(p) {
break;
} else if accept(p, sep) {
if term.matches(p) {
let q = p.last_span();
p.emit(DiagBuilder2::warning(format!("Superfluous trailing {}", sep)).span(q));
break;
}
} else {
let Spanned { value: tkn, span } = p.peek(0);
p.emit(
DiagBuilder2::error(format!(
"Expected {} or {} after {}, but found {} instead",
term, sep, msg, tkn
))
.span(span),
);
term.recover(p, false);
return Err(Recovered);
}
}
Ok(v)
}
/// Parse a non-empty list. See `separated` for details.
pub fn separated_nonempty<P: Parser, M, R, F, T>(
p: &mut P,
sep: Token,
term: T,
msg: &M,
parse: F,
) -> RecoveredResult<Vec<R>>
where
F: FnMut(&mut P) -> ReportedResult<R>,
T: Predicate<P>,
M: Display + ?Sized,
{
let q = p.peek(0).span;
let v = separated(p, sep, term, msg, parse)?;
if v.is_empty() {
p.emit(DiagBuilder2::error(format!("Expected at least one {}", msg)).span(q));
Err(Recovered)
} else {
Ok(v)
}
}
core.rs | pub struct FuncPredicate<P: Parser, M: FnMut(&mut P) -> bool, R: FnMut(&mut P, bool)> {
match_func: M,
recover_func: R,
desc: &'static str,
_marker: PhantomData<P>,
}
impl<P, M, R> Predicate<P> for FuncPredicate<P, M, R>
where
P: Parser,
M: FnMut(&mut P) -> bool,
R: FnMut(&mut P, bool),
{
fn matches(&mut self, p: &mut P) -> bool {
(self.match_func)(p)
}
fn recover(&mut self, p: &mut P, consume: bool) {
(self.recover_func)(p, consume)
}
}
impl<P, M, R> Display for FuncPredicate<P, M, R>
where
P: Parser,
M: FnMut(&mut P) -> bool,
R: FnMut(&mut P, bool),
{
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "{}", self.desc)
}
}
pub struct TokenPredicate<P: Parser, T: Predicate<P>> {
inner: T,
token: Token,
_marker: PhantomData<P>,
}
impl<P, T> TokenPredicate<P, T>
where
P: Parser,
T: Predicate<P>,
{
/// Create a new token predicate.
pub fn new(inner: T, token: Token) -> TokenPredicate<P, T> {
TokenPredicate {
inner: inner,
token: token,
_marker: PhantomData,
}
}
}
impl<P, T> Predicate<P> for TokenPredicate<P, T>
where
P: Parser,
T: Predicate<P>,
{
fn matches(&mut self, p: &mut P) -> bool {
self.inner.matches(p) || p.peek(0).value == self.token
}
fn recover(&mut self, p: &mut P, consume: bool) {
self.inner.recover(p, consume)
}
}
impl<P, T> Display for TokenPredicate<P, T>
where
P: Parser,
T: Predicate<P>,
{
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "{}, {}", self.inner, self.token)
}
}
macro_rules! token_predicate {
($token:expr) => ($token);
($token1:expr, $token2:expr $(, $tokens:expr)*) => (
TokenPredicate::new(token_predicate!($token2 $(, $tokens)*), $token1)
);
}
/// Skip tokens until one of the tokens in `term` is found outside any nested
/// delimiters. Mismatched or unopened closing delimiters emit a fatal
/// diagnostic and abort the recovery. If `eat_term` is true, the matching
/// terminator itself is consumed as well.
pub fn recover<P: Parser>(p: &mut P, term: &[Token], eat_term: bool) {
let mut stack = Vec::new();
loop {
let Spanned {
value: tkn,
span: sp,
} = p.peek(0);
if stack.is_empty() {
for t in term {
if *t == tkn {
if eat_term {
p.skip();
}
return;
}
}
}
// p.emit(
// DiagBuilder2::note("Skipped during recovery")
// .span(sp)
// );
match tkn {
OpenDelim(x) => stack.push(x),
CloseDelim(x) => {
if let Some(open) = stack.pop() {
if open != x {
p.emit(
DiagBuilder2::fatal(format!(
"Found closing {} which is not the complement to the previous \
opening {}",
CloseDelim(x),
OpenDelim(open)
))
.span(sp),
);
break;
}
} else {
p.emit(
DiagBuilder2::fatal(format!(
"Found closing {} without an earlier opening {}",
CloseDelim(x),
OpenDelim(x)
))
.span(sp),
);
break;
}
}
Eof => break,
_ => (),
}
p.skip();
}
}
/// Apply a parser and if it fails, recover to one of a list of tokens. This
/// turns reported into recovered errors.
pub fn recovered<P: Parser, R, F>(
p: &mut P,
term: &[Token],
eat_term: bool,
mut parse: F,
) -> RecoveredResult<R>
where
F: FnMut(&mut P) -> Result<R, Reported>,
{
match parse(p) {
Ok(x) => Ok(x),
Err(Reported) => {
recover(p, term, eat_term);
Err(Recovered)
}
}
}
/// Consume a token if it is present, do nothing otherwise.
pub fn accept<P: Parser>(p: &mut P, expect: Token) -> bool {
if p.peek(0).value == expect {
p.bump();
true
    } else {
        false
    }
}
/// Consume a specific token, or emit an error if the next token in the stream
/// does not match the expected one.
pub fn require<P: Parser>(p: &mut P, expect: Token) -> ReportedResult<()> {
let Spanned {
value: actual,
span,
} = p.peek(0);
if actual == expect {
p.bump();
Ok(())
} else {
p.emit(
DiagBuilder2::error(format!("Expected {}, but found {} instead", expect, actual))
.span(span),
);
Err(Reported)
}
}
/// Repeatedly apply a parser until it returns `None`.
pub fn repeat<P: Parser, R, F, E>(p: &mut P, mut parse: F) -> Result<Vec<R>, E>
where
F: FnMut(&mut P) -> Result<Option<R>, E>,
{
let mut v = Vec::new();
while p.peek(0).value != Eof {
match parse(p)? {
Some(x) => v.push(x),
None => break,
}
}
Ok(v)
}
/// Repeatedly apply a parser until a certain predicate matches.
pub fn repeat_until<P: Parser, R, F, T>(
p: &mut P,
mut term: T,
mut parse: F,
) -> Result<Vec<R>, Recovered>
where
F: FnMut(&mut P) -> Result<R, Reported>,
T: Predicate<P>,
{
let mut v = Vec::new();
while p.peek(0).value != Eof && !term.matches(p) {
match parse(p) {
Ok(x) => v.push(x),
Err(_) => {
term.recover(p, false);
return Err(Recovered);
}
}
}
Ok(v)
}
/// Parse a list of items separated with a specific token, until a terminator
/// token has been reached. The terminator is not consumed.
pub fn separated<P: Parser, M, R, F, T>(
p: &mut P,
sep: Token,
mut term: T,
msg: &M,
mut parse: F,
) -> RecoveredResult<Vec<R>>
where
F: FnMut(&mut P) -> ReportedResult<R>,
T: Predicate<P>,
M: Display + ?Sized,
{
let mut v = Vec::new();
while !p.is_fatal() && p.peek(0).value != Eof && !term.matches(p) {
// Parse the item. If the parser fails, recover to the terminator and
// abort.
match parse(p) {
Ok(x) => v.push(x),
Err(_) => {
term.recover(p, false);
return Err(Recovered);
}
}
// Try to match the terminator. If it does not, consume a separator and
// catch the case where the separator is immediately followed by the
// terminator (superfluous separator).
if term.matches(p) {
break;
} else if accept(p, sep) {
if term.matches(p) {
let q = p.last_span();
p.emit(DiagBuilder2::warning(format!("Superfluous trailing {}", sep)).span(q));
break;
}
} else {
let Spanned { value: tkn, span } = p.peek(0);
p.emit(
DiagBuilder2::error(format!(
"Expected {} or {} after {}, but found {} instead",
term, sep, msg, tkn
))
.span(span),
);
term.recover(p, false);
return Err(Recovered);
}
}
Ok(v)
}
/// Parse a non-empty list. See `separated` for details.
pub fn separated_nonempty<P: Parser, M, R, F, T>(
p: &mut P,
sep: Token,
term: T,
msg: &M,
parse: F,
) -> RecoveredResult<Vec<R>>
where
F: FnMut(&mut P) -> ReportedResult<R>,
T: Predicate<P>,
M: Display + ?Sized,
{
let q = p.peek(0).span;
let v = separated(p, sep, term, msg, parse)?;
if v.is_empty() {
p.emit(DiagBuilder2::error(format!("Expected at least one {}", msg)).span(q));
Err(Recovered)
} else {
Ok(v)
}
}
bot.py | import discord
from discord.ext import commands, menus
import config
from cogs.help import HelpCommand
from utils.jsonfile import JSONList, JSONDict
from utils.context import Context
from collections import Counter
import datetime
from contextlib import suppress
import traceback
import re
import os
extensions = (
"cogs.settings",
"cogs.core",
"cogs.voice",
"cogs.admin",
)
intents = discord.Intents.none()
intents.guilds = True
intents.members = True
intents.voice_states = True
intents.presences = True
intents.guild_messages = True
intents.guild_reactions = True
class Bot(commands.Bot):
def __init__(self):
super().__init__(
intents=intents,
command_prefix=lambda b, m: b.prefixes.get(str(m.guild.id), 'ui.'),
help_command=HelpCommand(),
case_insensitive=True,
owner_id=config.owner_id,
activity=discord.Activity(type=discord.ActivityType.watching, name='Upin Ipin!')
)
self.launched_at = None
self.client_id = config.client_id
if not os.path.exists('data'):
os.mkdir('data')
self.prefixes = JSONDict('data/prefixes.json') # Mapping[guild_id, prefix]
self.bad_words = JSONDict('data/bad_words.json') # Mapping[guild_id, List[str]]
self.configs = JSONDict('data/configs.json') # Mapping[channel_id, config]
self.channels = JSONList('data/channels.json') # List[channel_id]
self.blacklist = JSONList('data/blacklist.json') # List[user_id|guild_id]
self.voice_spam_control = commands.CooldownMapping.from_cooldown(2, 10, commands.BucketType.user)
self.voice_spam_counter = Counter()
self.text_spam_control = commands.CooldownMapping.from_cooldown(8, 10, commands.BucketType.user)
self.text_spam_counter = Counter()
for extension in extensions:
self.load_extension(extension)
async def on_ready(self):
        if self.launched_at is None:
            self.launched_at = datetime.datetime.utcnow()
            for guild in self.guilds:
                for channel in guild.voice_channels:
                    await self.on_voice_leave(channel)
            print('Logged in as', self.user)
async def on_message(self, message):
if message.guild is None:
return
await self.process_commands(message)
async def on_message_edit(self, before, after):
if before.content != after.content:
await self.on_message(after)
async def process_commands(self, message):
ctx = await self.get_context(message, cls=Context)
if ctx.command is None:
return
if ctx.author.id in self.blacklist:
return
if not ctx.channel.permissions_for(ctx.guild.me).send_messages:
return
bucket = self.text_spam_control.get_bucket(message)
current = message.created_at.replace(tzinfo=datetime.timezone.utc).timestamp()
retry_after = bucket.update_rate_limit(current)
if retry_after:
self.text_spam_counter[ctx.author.id] += 1
if self.text_spam_counter[ctx.author.id] >= 5:
del self.text_spam_counter[ctx.author.id]
self.blacklist.append(ctx.author.id)
await self.blacklist.save()
await ctx.send(f'You are being rate limited. Try again in `{retry_after:.2f}` seconds.')
else:
self.text_spam_counter.pop(message.author.id, None)
await self.invoke(ctx)
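    # Illustrative sketch (not part of the original file): how the
    # CooldownMapping above behaves in isolation. from_cooldown(8, 10, ...)
    # allows 8 hits per 10-second window per user; update_rate_limit()
    # returns the retry delay once the window is exhausted.
    #
    # mapping = commands.CooldownMapping.from_cooldown(8, 10, commands.BucketType.user)
    # bucket = mapping.get_bucket(message)
    # for _ in range(8):
    #     assert bucket.update_rate_limit() is None   # still within quota
    # assert bucket.update_rate_limit() is not None   # ninth hit is limited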
async def on_voice_state_update(self, member, before, after):
if before.channel != after.channel:
if before.channel is not None:
await self.on_voice_leave(before.channel)
if after.channel is not None:
await self.on_voice_join(member, after.channel)
async def on_voice_join(self, member, channel):
if member.id in self.blacklist:
return
if not str(channel.id) in self.configs:
return
perms = member.guild.me.guild_permissions
if not perms.manage_channels or not perms.move_members:
return
fake_message = discord.Object(id=0)
fake_message.author = member
bucket = self.voice_spam_control.get_bucket(fake_message)
retry_after = bucket.update_rate_limit()
if retry_after:
self.voice_spam_counter[member.id] += 1
if self.voice_spam_counter[member.id] >= 5:
del self.voice_spam_counter[member.id]
self.blacklist.append(member.id)
await self.blacklist.save()
with suppress(discord.Forbidden):
await member.send(f'You are being rate limited. Try again in `{retry_after:.2f}` seconds.')
else:
settings = self.configs[str(channel.id)]
name = settings.get('name', '@user')
limit = settings.get('limit', 0)
bitrate = settings.get('bitrate', 64000)
top = settings.get('top', False)
try:
category = member.guild.get_channel(settings['category'])
except KeyError:
category = channel.category
if '@user' in name:
name = name.replace('@user', member.display_name)
if '@game' in name:
for activity in member.activities:
if activity.type == discord.ActivityType.playing and activity.name is not None:
name = name.replace('@game', activity.name)
break
else:
name = name.replace('@game', 'no game')
if '@position' in name:
channels = [c for c in category.voice_channels if c.id in self.channels]
name = name.replace('@position', str(len(channels)+1))
words = self.bad_words.get(str(member.guild.id), [])
for word in words:
if word.casefold() in name.casefold():
name = re.sub(word, '*'*len(word), name, flags=re.IGNORECASE)
if len(name) > 100:
name = name[:97] + '...'
if perms.manage_roles:
overwrites = {member: discord.PermissionOverwrite(
manage_channels=True,
move_members=True
)}
else:
overwrites = None
new_channel = await member.guild.create_voice_channel(
overwrites=overwrites,
name=name,
category=category,
user_limit=limit,
bitrate=bitrate
)
if top:
self.loop.create_task(new_channel.edit(position=0))
await member.move_to(new_channel)
self.channels.append(new_channel.id)
await self.channels.save()
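    # Worked example (not part of the original file): how the name template
    # above expands. With the channel config name '@user plays @game (#@position)',
    # a member named 'Alice' playing 'Minecraft', creating the third managed
    # channel in the category, gets a channel called 'Alice plays Minecraft (#3)'.
    # '@game' falls back to 'no game' when no playing activity is found, and
    # configured bad words are masked with '*' before the 100-character cut.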
async def on_voice_leave(self, channel):
if channel.id in self.channels:
if len(channel.members) == 0:
ch = channel.guild.get_channel(channel.id)
if ch is not None:
perms = channel.permissions_for(channel.guild.me)
if perms.manage_channels:
await channel.delete()
self.channels.remove(channel.id)
await self.channels.save()
async def on_guild_channel_delete(self, channel):
if str(channel.id) in self.configs:
try:
self.configs.pop(str(channel.id))
except KeyError:
return
await self.configs.save()
async def on_guild_remove(self, guild):
try:
self.prefixes.pop(str(guild.id))
except KeyError:
pass
else:
await self.prefixes.save()
try:
self.bad_words.pop(str(guild.id))
except KeyError:
pass
else:
await self.bad_words.save()
channel_dump = False
config_dump = False
for channel in guild.voice_channels:
if channel.id in self.channels:
self.channels.remove(channel.id)
channel_dump = True
if str(channel.id) in self.configs:
try:
self.configs.pop(str(channel.id))
except KeyError:
continue
config_dump = True
if channel_dump:
await self.channels.save()
if config_dump:
await self.configs.save()
async def on_guild_join(self, guild):
if guild.id in self.blacklist:
await guild.leave()
async def on_command_error(self, ctx, error):
if isinstance(error, commands.CommandNotFound):
return
else:
ctx.command.reset_cooldown(ctx)
if isinstance(error, commands.CommandInvokeError) and not isinstance(error.original, menus.MenuError):
error = error.original
traceback.print_exception(error.__class__.__name__, error, error.__traceback__)
owner = self.get_user(self.owner_id)
if owner is not None:
tb = '\n'.join(traceback.format_exception(error.__class__.__name__, error, error.__traceback__))
with suppress(discord.HTTPException):
await owner.send(embed=discord.Embed(
description=f'```py\n{tb}```',
color=discord.Color.red()
))
else:
if isinstance(error, commands.CommandInvokeError):
error = error.original
await ctx.safe_send(msg=str(error).capitalize(), color=discord.Color.red())
if __name__ == "__main__":
Bot().run(os.environ['DISCORD_TOKEN'])
bot.py | import discord
from discord.ext import commands, menus
import config
from cogs.help import HelpCommand
from utils.jsonfile import JSONList, JSONDict
from utils.context import Context
from collections import Counter
import datetime
from contextlib import suppress
import traceback
import re
import os
extensions = (
"cogs.settings",
"cogs.core",
"cogs.voice",
"cogs.admin",
)
intents = discord.Intents.none()
intents.guilds = True
intents.members = True
intents.voice_states = True
intents.presences = True
intents.guild_messages = True
intents.guild_reactions = True
class Bot(commands.Bot):
def __init__(self):
super().__init__(
intents=intents,
command_prefix=lambda b, m: b.prefixes.get(str(m.guild.id), 'ui.'),
help_command=HelpCommand(),
case_insensitive=True,
owner_id=config.owner_id,
activity=discord.Activity(type=discord.ActivityType.watching, name='Upin Ipin!')
)
self.launched_at = None
self.client_id = config.client_id
if not os.path.exists('data'):
os.mkdir('data')
self.prefixes = JSONDict('data/prefixes.json') # Mapping[guild_id, prefix]
self.bad_words = JSONDict('data/bad_words.json') # Mapping[guild_id, List[str]]
self.configs = JSONDict('data/configs.json') # Mapping[channel_id, config]
self.channels = JSONList('data/channels.json') # List[channel_id]
self.blacklist = JSONList('data/blacklist.json') # List[user_id|guild_id]
self.voice_spam_control = commands.CooldownMapping.from_cooldown(2, 10, commands.BucketType.user)
self.voice_spam_counter = Counter()
self.text_spam_control = commands.CooldownMapping.from_cooldown(8, 10, commands.BucketType.user)
self.text_spam_counter = Counter()
for extension in extensions:
self.load_extension(extension)
async def on_ready(self):
if self.launched_at is None:
self.launched_at = datetime.datetime.utcnow()
for guild in self.guilds:
for channel in guild.voice_channels:
await self.on_voice_leave(channel)
print('Logged in as', self.user)
async def on_message(self, message):
if message.guild is None:
return
await self.process_commands(message)
async def on_message_edit(self, before, after):
if before.content != after.content:
await self.on_message(after)
async def process_commands(self, message):
ctx = await self.get_context(message, cls=Context)
if ctx.command is None:
return
if ctx.author.id in self.blacklist:
return
if not ctx.channel.permissions_for(ctx.guild.me).send_messages:
return
bucket = self.text_spam_control.get_bucket(message)
current = message.created_at.replace(tzinfo=datetime.timezone.utc).timestamp()
retry_after = bucket.update_rate_limit(current)
if retry_after:
self.text_spam_counter[ctx.author.id] += 1
if self.text_spam_counter[ctx.author.id] >= 5:
del self.text_spam_counter[ctx.author.id]
self.blacklist.append(ctx.author.id)
await self.blacklist.save()
await ctx.send(f'You are being rate limited. Try again in `{retry_after:.2f}` seconds.')
else:
self.text_spam_counter.pop(message.author.id, None)
await self.invoke(ctx)
async def on_voice_state_update(self, member, before, after):
if before.channel != after.channel:
if before.channel is not None:
await self.on_voice_leave(before.channel)
if after.channel is not None:
await self.on_voice_join(member, after.channel)
async def on_voice_join(self, member, channel):
if member.id in self.blacklist:
return
if not str(channel.id) in self.configs:
return
perms = member.guild.me.guild_permissions
if not perms.manage_channels or not perms.move_members:
return
fake_message = discord.Object(id=0)
fake_message.author = member
bucket = self.voice_spam_control.get_bucket(fake_message)
retry_after = bucket.update_rate_limit()
if retry_after:
self.voice_spam_counter[member.id] += 1
if self.voice_spam_counter[member.id] >= 5:
del self.voice_spam_counter[member.id]
self.blacklist.append(member.id)
await self.blacklist.save()
with suppress(discord.Forbidden):
await member.send(f'You are being rate limited. Try again in `{retry_after:.2f}` seconds.')
else:
settings = self.configs[str(channel.id)]
name = settings.get('name', '@user')
limit = settings.get('limit', 0)
bitrate = settings.get('bitrate', 64000)
top = settings.get('top', False)
try:
category = member.guild.get_channel(settings['category'])
except KeyError:
category = channel.category
if '@user' in name:
name = name.replace('@user', member.display_name)
if '@game' in name:
for activity in member.activities:
if activity.type == discord.ActivityType.playing and activity.name is not None:
name = name.replace('@game', activity.name)
break
else:
name = name.replace('@game', 'no game')
if '@position' in name:
channels = [c for c in category.voice_channels if c.id in self.channels]
name = name.replace('@position', str(len(channels)+1))
words = self.bad_words.get(str(member.guild.id), [])
for word in words:
if word.casefold() in name.casefold():
name = re.sub(word, '*'*len(word), name, flags=re.IGNORECASE)
if len(name) > 100:
name = name[:97] + '...'
if perms.manage_roles:
overwrites = {member: discord.PermissionOverwrite(
manage_channels=True,
move_members=True
)}
else:
overwrites = None
new_channel = await member.guild.create_voice_channel(
overwrites=overwrites,
name=name,
category=category,
user_limit=limit,
bitrate=bitrate
)
if top:
self.loop.create_task(new_channel.edit(position=0))
await member.move_to(new_channel)
self.channels.append(new_channel.id)
await self.channels.save()
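    # Illustrative sketch (not part of the original file): the bad-word
    # masking above in isolation. re.sub with re.IGNORECASE replaces every
    # case variant of the word with asterisks of the same length.
    #
    # import re
    # name = 'Badword lounge'
    # word = 'badword'
    # masked = re.sub(word, '*' * len(word), name, flags=re.IGNORECASE)
    # # masked == '******* lounge'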
async def on_voice_leave(self, channel):
if channel.id in self.channels:
if len(channel.members) == 0:
ch = channel.guild.get_channel(channel.id)
if ch is not None:
perms = channel.permissions_for(channel.guild.me)
if perms.manage_channels:
await channel.delete()
self.channels.remove(channel.id)
await self.channels.save()
async def on_guild_channel_delete(self, channel):
if str(channel.id) in self.configs:
try:
self.configs.pop(str(channel.id))
except KeyError:
return
await self.configs.save()
async def on_guild_remove(self, guild):
try:
self.prefixes.pop(str(guild.id))
except KeyError:
pass
else:
await self.prefixes.save()
try:
self.bad_words.pop(str(guild.id))
except KeyError:
pass
else:
await self.bad_words.save()
channel_dump = False
config_dump = False
for channel in guild.voice_channels:
if channel.id in self.channels:
self.channels.remove(channel.id)
channel_dump = True
if str(channel.id) in self.configs:
try:
self.configs.pop(str(channel.id))
except KeyError:
continue
config_dump = True
if channel_dump:
await self.channels.save()
if config_dump:
await self.configs.save()
async def on_guild_join(self, guild):
        if guild.id in self.blacklist:
            await guild.leave()

    async def on_command_error(self, ctx, error):
        if isinstance(error, commands.CommandNotFound):
            return
else:
ctx.command.reset_cooldown(ctx)
if isinstance(error, commands.CommandInvokeError) and not isinstance(error.original, menus.MenuError):
error = error.original
traceback.print_exception(error.__class__.__name__, error, error.__traceback__)
owner = self.get_user(self.owner_id)
if owner is not None:
tb = '\n'.join(traceback.format_exception(error.__class__.__name__, error, error.__traceback__))
with suppress(discord.HTTPException):
await owner.send(embed=discord.Embed(
description=f'```py\n{tb}```',
color=discord.Color.red()
))
else:
if isinstance(error, commands.CommandInvokeError):
error = error.original
await ctx.safe_send(msg=str(error).capitalize(), color=discord.Color.red())
if __name__ == "__main__":
Bot().run(os.environ['DISCORD_TOKEN'])
bot.py | import discord
from discord.ext import commands, menus
import config
from cogs.help import HelpCommand
from utils.jsonfile import JSONList, JSONDict
from utils.context import Context
from collections import Counter
import datetime
from contextlib import suppress
import traceback
import re
import os
extensions = (
"cogs.settings",
"cogs.core",
"cogs.voice",
"cogs.admin",
)
intents = discord.Intents.none()
intents.guilds = True
intents.members = True
intents.voice_states = True
intents.presences = True
intents.guild_messages = True
intents.guild_reactions = True
class Bot(commands.Bot):
def __init__(self):
super().__init__(
intents=intents,
command_prefix=lambda b, m: b.prefixes.get(str(m.guild.id), 'ui.'),
help_command=HelpCommand(),
case_insensitive=True,
owner_id=config.owner_id,
activity=discord.Activity(type=discord.ActivityType.watching, name='Upin Ipin!')
)
self.launched_at = None
self.client_id = config.client_id
if not os.path.exists('data'):
os.mkdir('data')
self.prefixes = JSONDict('data/prefixes.json') # Mapping[guild_id, prefix]
self.bad_words = JSONDict('data/bad_words.json') # Mapping[guild_id, List[str]]
self.configs = JSONDict('data/configs.json') # Mapping[channel_id, config]
self.channels = JSONList('data/channels.json') # List[channel_id]
self.blacklist = JSONList('data/blacklist.json') # List[user_id|guild_id]
self.voice_spam_control = commands.CooldownMapping.from_cooldown(2, 10, commands.BucketType.user)
self.voice_spam_counter = Counter()
self.text_spam_control = commands.CooldownMapping.from_cooldown(8, 10, commands.BucketType.user)
self.text_spam_counter = Counter()
for extension in extensions:
self.load_extension(extension)
async def on_ready(self):
if self.launched_at is None:
self.launched_at = datetime.datetime.utcnow()
for guild in self.guilds:
for channel in guild.voice_channels:
await self.on_voice_leave(channel)
print('Logged in as', self.user)
async def on_message(self, message):
if message.guild is None:
return
await self.process_commands(message)
async def on_message_edit(self, before, after):
if before.content != after.content:
await self.on_message(after)
async def process_commands(self, message):
ctx = await self.get_context(message, cls=Context)
if ctx.command is None:
return
if ctx.author.id in self.blacklist:
return
if not ctx.channel.permissions_for(ctx.guild.me).send_messages:
return
bucket = self.text_spam_control.get_bucket(message)
current = message.created_at.replace(tzinfo=datetime.timezone.utc).timestamp()
retry_after = bucket.update_rate_limit(current)
if retry_after:
self.text_spam_counter[ctx.author.id] += 1
if self.text_spam_counter[ctx.author.id] >= 5:
del self.text_spam_counter[ctx.author.id]
self.blacklist.append(ctx.author.id)
await self.blacklist.save()
await ctx.send(f'You are being rate limited. Try again in `{retry_after:.2f}` seconds.')
else:
self.text_spam_counter.pop(message.author.id, None)
await self.invoke(ctx)
async def on_voice_state_update(self, member, before, after):
if before.channel != after.channel:
if before.channel is not None:
await self.on_voice_leave(before.channel)
if after.channel is not None:
await self.on_voice_join(member, after.channel)
async def on_voice_join(self, member, channel):
if member.id in self.blacklist:
return
if not str(channel.id) in self.configs:
return
perms = member.guild.me.guild_permissions
if not perms.manage_channels or not perms.move_members:
return
fake_message = discord.Object(id=0)
fake_message.author = member
bucket = self.voice_spam_control.get_bucket(fake_message)
retry_after = bucket.update_rate_limit()
if retry_after:
self.voice_spam_counter[member.id] += 1
if self.voice_spam_counter[member.id] >= 5:
del self.voice_spam_counter[member.id]
self.blacklist.append(member.id)
await self.blacklist.save()
with suppress(discord.Forbidden):
await member.send(f'You are being rate limited. Try again in `{retry_after:.2f}` seconds.')
else:
settings = self.configs[str(channel.id)]
name = settings.get('name', '@user')
limit = settings.get('limit', 0)
bitrate = settings.get('bitrate', 64000)
top = settings.get('top', False)
try:
category = member.guild.get_channel(settings['category'])
except KeyError:
category = channel.category
if '@user' in name:
name = name.replace('@user', member.display_name)
if '@game' in name:
for activity in member.activities:
if activity.type == discord.ActivityType.playing and activity.name is not None:
name = name.replace('@game', activity.name)
break
else:
name = name.replace('@game', 'no game')
if '@position' in name:
channels = [c for c in category.voice_channels if c.id in self.channels]
name = name.replace('@position', str(len(channels)+1))
words = self.bad_words.get(str(member.guild.id), [])
for word in words:
if word.casefold() in name.casefold():
name = re.sub(word, '*'*len(word), name, flags=re.IGNORECASE)
if len(name) > 100:
name = name[:97] + '...'
if perms.manage_roles:
overwrites = {member: discord.PermissionOverwrite(
manage_channels=True,
move_members=True
)}
else:
overwrites = None
new_channel = await member.guild.create_voice_channel(
overwrites=overwrites,
name=name,
category=category,
user_limit=limit,
bitrate=bitrate
)
if top:
self.loop.create_task(new_channel.edit(position=0))
await member.move_to(new_channel)
self.channels.append(new_channel.id)
await self.channels.save()
async def on_voice_leave(self, channel):
if channel.id in self.channels:
if len(channel.members) == 0:
ch = channel.guild.get_channel(channel.id)
if ch is not None:
perms = channel.permissions_for(channel.guild.me)
if perms.manage_channels:
await channel.delete()
self.channels.remove(channel.id)
await self.channels.save()
async def on_guild_channel_delete(self, channel):
if str(channel.id) in self.configs:
try:
self.configs.pop(str(channel.id))
except KeyError:
return
await self.configs.save()
async def on_guild_remove(self, guild):
try:
self.prefixes.pop(str(guild.id))
except KeyError:
pass
else:
await self.prefixes.save()
try:
self.bad_words.pop(str(guild.id))
except KeyError:
pass
else:
await self.bad_words.save()
channel_dump = False
config_dump = False
for channel in guild.voice_channels:
if channel.id in self.channels:
self.channels.remove(channel.id)
channel_dump = True
if str(channel.id) in self.configs:
try:
self.configs.pop(str(channel.id))
except KeyError:
continue
config_dump = True
if channel_dump:
await self.channels.save()
if config_dump:
await self.configs.save()
async def on_guild_join(self, guild):
if guild.id in self.blacklist:
await guild.leave()
async def on_command_error(self, ctx, error):
if isinstance(error, commands.CommandNotFound):
return
else:
ctx.command.reset_cooldown(ctx)
if isinstance(error, commands.CommandInvokeError) and not isinstance(error.original, menus.MenuError):
error = error.original
traceback.print_exception(error.__class__.__name__, error, error.__traceback__)
owner = self.get_user(self.owner_id)
if owner is not None:
tb = '\n'.join(traceback.format_exception(error.__class__.__name__, error, error.__traceback__))
with suppress(discord.HTTPException):
await owner.send(embed=discord.Embed(
description=f'```py\n{tb}```',
color=discord.Color.red()
))
else:
if isinstance(error, commands.CommandInvokeError):
error = error.original
await ctx.safe_send(msg=str(error).capitalize(), color=discord.Color.red())
if __name__ == "__main__":
Bot().run(os.environ['DISCORD_TOKEN'])
bot.py | import discord
from discord.ext import commands, menus
import config
from cogs.help import HelpCommand
from utils.jsonfile import JSONList, JSONDict
from utils.context import Context
from collections import Counter
import datetime
from contextlib import suppress
import traceback
import re
import os
extensions = (
"cogs.settings",
"cogs.core",
"cogs.voice",
"cogs.admin",
)
intents = discord.Intents.none()
intents.guilds = True
intents.members = True
intents.voice_states = True
intents.presences = True
intents.guild_messages = True
intents.guild_reactions = True
class Bot(commands.Bot):
def __init__(self):
super().__init__(
intents=intents,
command_prefix=lambda b, m: b.prefixes.get(str(m.guild.id), 'ui.'),
help_command=HelpCommand(),
case_insensitive=True,
owner_id=config.owner_id,
activity=discord.Activity(type=discord.ActivityType.watching, name='Upin Ipin!')
)
self.launched_at = None
self.client_id = config.client_id
if not os.path.exists('data'):
os.mkdir('data')
self.prefixes = JSONDict('data/prefixes.json') # Mapping[guild_id, prefix]
self.bad_words = JSONDict('data/bad_words.json') # Mapping[guild_id, List[str]]
self.configs = JSONDict('data/configs.json') # Mapping[channel_id, config]
self.channels = JSONList('data/channels.json') # List[channel_id]
self.blacklist = JSONList('data/blacklist.json') # List[user_id|guild_id]
self.voice_spam_control = commands.CooldownMapping.from_cooldown(2, 10, commands.BucketType.user)
self.voice_spam_counter = Counter()
self.text_spam_control = commands.CooldownMapping.from_cooldown(8, 10, commands.BucketType.user)
self.text_spam_counter = Counter()
for extension in extensions:
self.load_extension(extension)
async def on_ready(self):
if self.launched_at is None:
self.launched_at = datetime.datetime.utcnow()
for guild in self.guilds:
for channel in guild.voice_channels:
await self.on_voice_leave(channel)
print('Logged in as', self.user)
async def on_message(self, message):
if message.guild is None:
return
await self.process_commands(message)
async def on_message_edit(self, before, after):
if before.content != after.content:
await self.on_message(after)
async def process_commands(self, message):
ctx = await self.get_context(message, cls=Context)
if ctx.command is None:
return
if ctx.author.id in self.blacklist:
return
if not ctx.channel.permissions_for(ctx.guild.me).send_messages:
return
bucket = self.text_spam_control.get_bucket(message)
current = message.created_at.replace(tzinfo=datetime.timezone.utc).timestamp()
retry_after = bucket.update_rate_limit(current)
if retry_after:
self.text_spam_counter[ctx.author.id] += 1
if self.text_spam_counter[ctx.author.id] >= 5:
del self.text_spam_counter[ctx.author.id]
self.blacklist.append(ctx.author.id)
await self.blacklist.save()
await ctx.send(f'You are being rate limited. Try again in `{retry_after:.2f}` seconds.')
else:
self.text_spam_counter.pop(message.author.id, None)
await self.invoke(ctx)
async def on_voice_state_update(self, member, before, after):
if before.channel != after.channel:
if before.channel is not None:
await self.on_voice_leave(before.channel)
if after.channel is not None:
await self.on_voice_join(member, after.channel)
async def on_voice_join(self, member, channel):
if member.id in self.blacklist:
return
if not str(channel.id) in self.configs:
return
perms = member.guild.me.guild_permissions
if not perms.manage_channels or not perms.move_members:
return
fake_message = discord.Object(id=0)
fake_message.author = member
bucket = self.voice_spam_control.get_bucket(fake_message)
retry_after = bucket.update_rate_limit()
if retry_after:
            self.voice_spam_counter[member.id] += 1
            if self.voice_spam_counter[member.id] >= 5:
                del self.voice_spam_counter[member.id]
                self.blacklist.append(member.id)
                await self.blacklist.save()
            with suppress(discord.Forbidden):
                await member.send(f'You are being rate limited. Try again in `{retry_after:.2f}` seconds.')
else:
settings = self.configs[str(channel.id)]
name = settings.get('name', '@user')
limit = settings.get('limit', 0)
bitrate = settings.get('bitrate', 64000)
top = settings.get('top', False)
try:
category = member.guild.get_channel(settings['category'])
except KeyError:
category = channel.category
if '@user' in name:
name = name.replace('@user', member.display_name)
if '@game' in name:
for activity in member.activities:
if activity.type == discord.ActivityType.playing and activity.name is not None:
name = name.replace('@game', activity.name)
break
else:
name = name.replace('@game', 'no game')
if '@position' in name:
channels = [c for c in category.voice_channels if c.id in self.channels]
name = name.replace('@position', str(len(channels)+1))
words = self.bad_words.get(str(member.guild.id), [])
for word in words:
if word.casefold() in name.casefold():
name = re.sub(word, '*'*len(word), name, flags=re.IGNORECASE)
if len(name) > 100:
name = name[:97] + '...'
if perms.manage_roles:
overwrites = {member: discord.PermissionOverwrite(
manage_channels=True,
move_members=True
)}
else:
overwrites = None
new_channel = await member.guild.create_voice_channel(
overwrites=overwrites,
name=name,
category=category,
user_limit=limit,
bitrate=bitrate
)
if top:
self.loop.create_task(new_channel.edit(position=0))
await member.move_to(new_channel)
self.channels.append(new_channel.id)
await self.channels.save()
async def on_voice_leave(self, channel):
if channel.id in self.channels:
if len(channel.members) == 0:
ch = channel.guild.get_channel(channel.id)
if ch is not None:
perms = channel.permissions_for(channel.guild.me)
if perms.manage_channels:
await channel.delete()
self.channels.remove(channel.id)
await self.channels.save()
async def on_guild_channel_delete(self, channel):
if str(channel.id) in self.configs:
try:
self.configs.pop(str(channel.id))
except KeyError:
return
await self.configs.save()
async def on_guild_remove(self, guild):
try:
self.prefixes.pop(str(guild.id))
except KeyError:
pass
else:
await self.prefixes.save()
try:
self.bad_words.pop(str(guild.id))
except KeyError:
pass
else:
await self.bad_words.save()
channel_dump = False
config_dump = False
for channel in guild.voice_channels:
if channel.id in self.channels:
self.channels.remove(channel.id)
channel_dump = True
if str(channel.id) in self.configs:
try:
self.configs.pop(str(channel.id))
except KeyError:
continue
config_dump = True
if channel_dump:
await self.channels.save()
if config_dump:
await self.configs.save()
async def on_guild_join(self, guild):
if guild.id in self.blacklist:
await guild.leave()
async def on_command_error(self, ctx, error):
if isinstance(error, commands.CommandNotFound):
return
else:
ctx.command.reset_cooldown(ctx)
if isinstance(error, commands.CommandInvokeError) and not isinstance(error.original, menus.MenuError):
error = error.original
traceback.print_exception(error.__class__.__name__, error, error.__traceback__)
owner = self.get_user(self.owner_id)
if owner is not None:
tb = '\n'.join(traceback.format_exception(error.__class__.__name__, error, error.__traceback__))
with suppress(discord.HTTPException):
await owner.send(embed=discord.Embed(
description=f'```py\n{tb}```',
color=discord.Color.red()
))
else:
if isinstance(error, commands.CommandInvokeError):
error = error.original
await ctx.safe_send(msg=str(error).capitalize(), color=discord.Color.red())
if __name__ == "__main__":
Bot().run(os.environ['DISCORD_TOKEN'])
entity_test.go | var expectedType string
var expectedBody string
switch i {
case 0:
expectedType = "text/plain"
expectedBody = "Text part"
case 1:
expectedType = "text/html"
expectedBody = "<p>HTML part</p>"
}
if mediaType := p.Header.Get("Content-Type"); mediaType != expectedType {
t.Errorf("Expected part Content-Type to be %q, got %q", expectedType, mediaType)
}
if b, err := ioutil.ReadAll(p.Body); err != nil {
t.Error("Expected no error while reading part body, got", err)
} else if s := string(b); s != expectedBody {
t.Errorf("Expected %q as part body but got %q", expectedBody, s)
}
i++
}
if i != 2 {
t.Fatalf("Expected multipart entity to contain exactly 2 parts, got %v", i)
}
}
func TestNewMultipart(t *testing.T) {
testMultipart(t, testMakeMultipart())
}
func TestNewMultipart_read(t *testing.T) {
e := testMakeMultipart()
if b, err := ioutil.ReadAll(e.Body); err != nil {
t.Error("Expected no error while reading multipart body, got", err)
} else if s := string(b); s != testMultipartBody {
t.Errorf("Expected %q as multipart body but got %q", testMultipartBody, s)
}
}
func TestRead_multipart(t *testing.T) {
e, err := Read(strings.NewReader(testMultipartText))
if err != nil {
t.Fatal("Expected no error while reading multipart, got", err)
}
testMultipart(t, e)
}
func TestRead_single(t *testing.T) {
e, err := Read(strings.NewReader(testSingleText))
if err != nil {
t.Fatalf("Read() = %v", err)
}
b, err := ioutil.ReadAll(e.Body)
if err != nil {
t.Fatalf("ioutil.ReadAll() = %v", err)
}
expected := "Message body"
if string(b) != expected {
t.Fatalf("Expected body to be %q, got %q", expected, string(b))
}
}
func TestRead_tooBig(t *testing.T) {
raw := "Subject: " + strings.Repeat("A", 4096*1024) + "\r\n" +
"\r\n" +
"This header is too big.\r\n"
_, err := Read(strings.NewReader(raw))
if err != errHeaderTooBig {
t.Fatalf("Read() = %q, want %q", err, errHeaderTooBig)
}
}
func TestReadOptions_withDefaults(t *testing.T) {
// verify that .withDefaults() doesn't mutate original values
original := &ReadOptions{MaxHeaderBytes: -123}
modified := original.withDefaults() // should set MaxHeaderBytes to math.MaxInt64
if original.MaxHeaderBytes == modified.MaxHeaderBytes {
t.Error("Expected ReadOptions.withDefaults() to not mutate the original value")
}
}
func TestReadWithOptions(t *testing.T) {
tests := []struct {
name string
original *ReadOptions
want *ReadOptions
wantErr bool
}{
{
name: "default value",
original: &ReadOptions{},
want: &ReadOptions{MaxHeaderBytes: defaultMaxHeaderBytes},
wantErr: true,
},
{
name: "infinite header value",
original: &ReadOptions{MaxHeaderBytes: -1},
want: &ReadOptions{MaxHeaderBytes: math.MaxInt64},
wantErr: false,
},
{
name: "infinite header value any negative",
original: &ReadOptions{MaxHeaderBytes: -1234},
want: &ReadOptions{MaxHeaderBytes: math.MaxInt64},
wantErr: false,
},
{
name: "custom header value",
original: &ReadOptions{MaxHeaderBytes: 128},
want: &ReadOptions{MaxHeaderBytes: 128},
wantErr: true,
},
}
for _, test := range tests {
raw := "Subject: " + strings.Repeat("A", 4096*1024) + "\r\n" +
"\r\n" +
"This header is very big, but we should allow it via options.\r\n"
t.Run(test.name, func(t *testing.T) {
// First validate the options will be set as expected, or there is no
// point checking the ReadWithOptions func.
got := test.original.withDefaults()
if !reflect.DeepEqual(got, test.want) {
t.Fatalf("ReadOptions.withDefaults() =\n%#v\nbut want:\n%#v", got, test.want)
}
_, err := ReadWithOptions(strings.NewReader(raw), test.original)
gotErr := err != nil
if gotErr != test.wantErr {
t.Errorf("ReadWithOptions() = %t but want: %t", gotErr, test.wantErr)
}
})
}
}
func TestReadWithOptions_nilDefault(t *testing.T) {
raw := "Subject: Something\r\n"
var opts *ReadOptions // nil; exercises the nil-options default path
_, err := ReadWithOptions(strings.NewReader(raw), opts)
if err != nil {
t.Fatalf("ReadWithOptions() = %v", err)
}
}
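// Illustrative sketch (not part of the original tests): how a caller might
// lift the header-size cap entirely when reading trusted input. Any negative
// MaxHeaderBytes is normalized to "no limit" by withDefaults(); the function
// name and io.Reader parameter are assumptions for the example.
//
// func readTrusted(r io.Reader) (*Entity, error) {
//     return ReadWithOptions(r, &ReadOptions{MaxHeaderBytes: -1})
// }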
func TestEntity_WriteTo_decode(t *testing.T) {
e := testMakeEntity()
e.Header.SetContentType("text/plain", map[string]string{"charset": "utf-8"})
e.Header.Del("Content-Transfer-Encoding")
var b bytes.Buffer
if err := e.WriteTo(&b); err != nil {
t.Fatal("Expected no error while writing entity, got", err)
}
expected := "Mime-Version: 1.0\r\n" +
"Content-Type: text/plain; charset=utf-8\r\n" +
"\r\n" +
"cc sava"
if s := b.String(); s != expected {
t.Errorf("Expected written entity to be:\n%s\nbut got:\n%s", expected, s)
}
}
func TestEntity_WriteTo_convert(t *testing.T) {
var h Header
h.Set("Content-Type", "text/plain; charset=utf-8")
h.Set("Content-Transfer-Encoding", "base64")
r := strings.NewReader("Qm9uam91ciDDoCB0b3Vz")
e, _ := New(h, r)
e.Header.Set("Content-Transfer-Encoding", "quoted-printable")
var b bytes.Buffer
if err := e.WriteTo(&b); err != nil {
t.Fatal("Expected no error while writing entity, got", err)
}
expected := "Mime-Version: 1.0\r\n" +
"Content-Transfer-Encoding: quoted-printable\r\n" +
"Content-Type: text/plain; charset=utf-8\r\n" +
"\r\n" +
"Bonjour =C3=A0 tous"
if s := b.String(); s != expected {
t.Errorf("Expected written entity to be:\n%s\nbut got:\n%s", expected, s)
}
}
func TestEntity_WriteTo_multipart(t *testing.T) {
e := testMakeMultipart()
var b bytes.Buffer
if err := e.WriteTo(&b); err != nil {
t.Fatal("Expected no error while writing entity, got", err)
}
if s := b.String(); s != testMultipartText {
t.Errorf("Expected written entity to be:\n%s\nbut got:\n%s", testMultipartText, s)
}
}
func TestNew_unknownTransferEncoding(t *testing.T) {
var h Header
h.Set("Content-Transfer-Encoding", "i-dont-exist")
expected := "hey there"
r := strings.NewReader(expected)
e, err := New(h, r)
if err == nil {
t.Fatal("New(unknown transfer encoding): expected an error")
}
if !IsUnknownEncoding(err) {
t.Fatal("New(unknown transfer encoding): expected an error that verifies IsUnknownEncoding")
}
if !errors.As(err, &UnknownEncodingError{}) {
t.Fatal("New(unknown transfer encoding): expected an error that verifies errors.As(err, &EncodingError{})")
}
if b, err := ioutil.ReadAll(e.Body); err != nil {
t.Error("Expected no error while reading entity body, got", err)
} else if s := string(b); s != expected {
t.Errorf("Expected %q as entity body but got %q", expected, s)
}
}
func TestNew_unknownCharset(t *testing.T) {
var h Header
h.Set("Content-Type", "text/plain; charset=I-DONT-EXIST")
expected := "hey there"
r := strings.NewReader(expected)
e, err := New(h, r)
	if err == nil {
		t.Fatal("New(unknown charset): expected an error")
	}
if !IsUnknownCharset(err) {
t.Fatal("New(unknown charset): expected an error that verifies IsUnknownCharset")
}
if b, err := ioutil.ReadAll(e.Body); err != nil {
t.Error("Expected no error while reading entity body, got", err)
	} else if s := string(b); s != expected {
		t.Errorf("Expected %q as entity body but got %q", expected, s)
	}
}
entity_test.go | func testMakeMultipart() *Entity {
var h1 Header
h1.Set("Content-Type", "text/plain")
r1 := strings.NewReader("Text part")
e1, _ := New(h1, r1)
var h2 Header
h2.Set("Content-Type", "text/html")
r2 := strings.NewReader("<p>HTML part</p>")
e2, _ := New(h2, r2)
var h Header
h.Set("Content-Type", "multipart/alternative; boundary=IMTHEBOUNDARY")
e, _ := NewMultipart(h, []*Entity{e1, e2})
return e
}
const testMultipartHeader = "Mime-Version: 1.0\r\n" +
"Content-Type: multipart/alternative; boundary=IMTHEBOUNDARY\r\n\r\n"
const testMultipartBody = "--IMTHEBOUNDARY\r\n" +
"Content-Type: text/plain\r\n" +
"\r\n" +
"Text part\r\n" +
"--IMTHEBOUNDARY\r\n" +
"Content-Type: text/html\r\n" +
"\r\n" +
"<p>HTML part</p>\r\n" +
"--IMTHEBOUNDARY--\r\n"
var testMultipartText = testMultipartHeader + testMultipartBody
const testSingleText = "Content-Type: text/plain\r\n" +
"\r\n" +
"Message body"
func testMultipart(t *testing.T, e *Entity) {
mr := e.MultipartReader()
if mr == nil {
t.Fatalf("Expected MultipartReader not to return nil")
}
defer mr.Close()
i := 0
for {
p, err := mr.NextPart()
if err == io.EOF {
break
} else if err != nil {
t.Fatal("Expected no error while reading multipart entity, got", err)
}
var expectedType string
var expectedBody string
switch i {
case 0:
expectedType = "text/plain"
expectedBody = "Text part"
case 1:
expectedType = "text/html"
expectedBody = "<p>HTML part</p>"
}
if mediaType := p.Header.Get("Content-Type"); mediaType != expectedType {
t.Errorf("Expected part Content-Type to be %q, got %q", expectedType, mediaType)
}
if b, err := ioutil.ReadAll(p.Body); err != nil {
t.Error("Expected no error while reading part body, got", err)
} else if s := string(b); s != expectedBody {
t.Errorf("Expected %q as part body but got %q", expectedBody, s)
}
i++
}
if i != 2 {
t.Fatalf("Expected multipart entity to contain exactly 2 parts, got %v", i)
}
}
func TestNewMultipart(t *testing.T) {
testMultipart(t, testMakeMultipart())
}
func TestNewMultipart_read(t *testing.T) {
e := testMakeMultipart()
if b, err := ioutil.ReadAll(e.Body); err != nil {
t.Error("Expected no error while reading multipart body, got", err)
} else if s := string(b); s != testMultipartBody {
t.Errorf("Expected %q as multipart body but got %q", testMultipartBody, s)
}
}
func TestRead_multipart(t *testing.T) {
e, err := Read(strings.NewReader(testMultipartText))
if err != nil {
t.Fatal("Expected no error while reading multipart, got", err)
}
testMultipart(t, e)
}
func TestRead_single(t *testing.T) {
e, err := Read(strings.NewReader(testSingleText))
if err != nil {
t.Fatalf("Read() = %v", err)
}
b, err := ioutil.ReadAll(e.Body)
if err != nil {
t.Fatalf("ioutil.ReadAll() = %v", err)
}
expected := "Message body"
if string(b) != expected {
t.Fatalf("Expected body to be %q, got %q", expected, string(b))
}
}
func TestRead_tooBig(t *testing.T) {
raw := "Subject: " + strings.Repeat("A", 4096*1024) + "\r\n" +
"\r\n" +
"This header is too big.\r\n"
_, err := Read(strings.NewReader(raw))
if err != errHeaderTooBig {
t.Fatalf("Read() = %q, want %q", err, errHeaderTooBig)
}
}
func TestReadOptions_withDefaults(t *testing.T) {
// verify that .withDefaults() doesn't mutate original values
original := &ReadOptions{MaxHeaderBytes: -123}
modified := original.withDefaults() // should set MaxHeaderBytes to math.MaxInt64
if original.MaxHeaderBytes == modified.MaxHeaderBytes {
t.Error("Expected ReadOptions.withDefaults() to not mutate the original value")
}
}
func TestReadWithOptions(t *testing.T) {
tests := []struct {
name string
original *ReadOptions
want *ReadOptions
wantErr bool
}{
{
name: "default value",
original: &ReadOptions{},
want: &ReadOptions{MaxHeaderBytes: defaultMaxHeaderBytes},
wantErr: true,
},
{
name: "infinite header value",
original: &ReadOptions{MaxHeaderBytes: -1},
want: &ReadOptions{MaxHeaderBytes: math.MaxInt64},
wantErr: false,
},
{
name: "infinite header value any negative",
original: &ReadOptions{MaxHeaderBytes: -1234},
want: &ReadOptions{MaxHeaderBytes: math.MaxInt64},
wantErr: false,
},
{
name: "custom header value",
original: &ReadOptions{MaxHeaderBytes: 128},
want: &ReadOptions{MaxHeaderBytes: 128},
wantErr: true,
},
}
for _, test := range tests {
raw := "Subject: " + strings.Repeat("A", 4096*1024) + "\r\n" +
"\r\n" +
"This header is very big, but we should allow it via options.\r\n"
t.Run(test.name, func(t *testing.T) {
// First validate the options will be set as expected, or there is no
// point checking the ReadWithOptions func.
got := test.original.withDefaults()
if !reflect.DeepEqual(got, test.want) {
t.Fatalf("ReadOptions.withDefaults() =\n%#v\nbut want:\n%#v", got, test.want)
}
_, err := ReadWithOptions(strings.NewReader(raw), test.original)
gotErr := err != nil
if gotErr != test.wantErr {
t.Errorf("ReadWithOptions() = %t but want: %t", gotErr, test.wantErr)
}
})
}
}
func TestReadWithOptions_nilDefault(t *testing.T) {
raw := "Subject: Something\r\n"
var opts *ReadOptions // explicitly nil: ReadWithOptions should fall back to defaults
_, err := ReadWithOptions(strings.NewReader(raw), opts)
if err != nil {
t.Fatalf("ReadWithOptions() = %v", err)
}
}
func TestEntity_WriteTo_decode(t *testing.T) {
e := testMakeEntity()
e.Header.SetContentType("text/plain", map[string]string{"charset": "utf-8"})
e.Header.Del("Content-Transfer-Encoding")
var b bytes.Buffer
if err := e.WriteTo(&b); err != nil {
t.Fatal("Expected no error while writing entity, got", err)
}
expected := "Mime-Version: 1.0\r\n" +
"Content-Type: text/plain; charset=utf-8\r\n" +
"\r\n" +
"cc sava"
if s := b.String(); s != expected {
t.Errorf("Expected written entity to be:\n%s\nbut got:\n%s", expected, s)
}
}
func TestEntity_WriteTo_convert(t *testing.T) {
var h Header
h.Set("Content-Type", "text/plain; charset=utf-8")
h.Set("Content-Transfer-Encoding", "base64")
r := strings.NewReader("Qm9uam91ciDDoCB0b3Vz")
e, _ := New(h, r)
e.Header.Set("Content-Transfer-Encoding", "quoted-printable")
var b bytes.Buffer
if err := e.WriteTo(&b); err != nil {
t.Fatal("Expected no error while writing entity, got", err)
}
expected := "Mime-Version: 1.0\r\n" +
"Content-Transfer-Encoding: quoted-printable\r\n" +
"Content-Type: text/plain; charset=utf-8\r\n" +
"\r\n" +
"Bonjour =C3=A0 tous"
if s := b.String(); s != expected {
t.Errorf("Expected written entity to be:\n%s\nbut got:\n%s", expected, s)
}
}
func TestEntity_WriteTo_multipart(t *testing.T) {
e := testMakeMultipart()
var b bytes.Buffer
if err := e.WriteTo | testMakeMultipart | identifier_name |
|
entity_test.go | want: &ReadOptions{MaxHeaderBytes: math.MaxInt64},
wantErr: false,
},
{
name: "custom header value",
original: &ReadOptions{MaxHeaderBytes: 128},
want: &ReadOptions{MaxHeaderBytes: 128},
wantErr: true,
},
}
for _, test := range tests {
raw := "Subject: " + strings.Repeat("A", 4096*1024) + "\r\n" +
"\r\n" +
"This header is very big, but we should allow it via options.\r\n"
t.Run(test.name, func(t *testing.T) {
// First validate the options will be set as expected, or there is no
// point checking the ReadWithOptions func.
got := test.original.withDefaults()
if !reflect.DeepEqual(got, test.want) {
t.Fatalf("ReadOptions.withDefaults() =\n%#v\nbut want:\n%#v", got, test.want)
}
_, err := ReadWithOptions(strings.NewReader(raw), test.original)
gotErr := err != nil
if gotErr != test.wantErr {
t.Errorf("ReadWithOptions() = %t but want: %t", gotErr, test.wantErr)
}
})
}
}
func TestReadWithOptions_nilDefault(t *testing.T) {
raw := "Subject: Something\r\n"
var opts *ReadOptions // explicitly nil: ReadWithOptions should fall back to defaults
_, err := ReadWithOptions(strings.NewReader(raw), opts)
if err != nil {
t.Fatalf("ReadWithOptions() = %v", err)
}
}
func TestEntity_WriteTo_decode(t *testing.T) {
e := testMakeEntity()
e.Header.SetContentType("text/plain", map[string]string{"charset": "utf-8"})
e.Header.Del("Content-Transfer-Encoding")
var b bytes.Buffer
if err := e.WriteTo(&b); err != nil {
t.Fatal("Expected no error while writing entity, got", err)
}
expected := "Mime-Version: 1.0\r\n" +
"Content-Type: text/plain; charset=utf-8\r\n" +
"\r\n" +
"cc sava"
if s := b.String(); s != expected {
t.Errorf("Expected written entity to be:\n%s\nbut got:\n%s", expected, s)
}
}
func TestEntity_WriteTo_convert(t *testing.T) {
var h Header
h.Set("Content-Type", "text/plain; charset=utf-8")
h.Set("Content-Transfer-Encoding", "base64")
r := strings.NewReader("Qm9uam91ciDDoCB0b3Vz")
e, _ := New(h, r)
e.Header.Set("Content-Transfer-Encoding", "quoted-printable")
var b bytes.Buffer
if err := e.WriteTo(&b); err != nil {
t.Fatal("Expected no error while writing entity, got", err)
}
expected := "Mime-Version: 1.0\r\n" +
"Content-Transfer-Encoding: quoted-printable\r\n" +
"Content-Type: text/plain; charset=utf-8\r\n" +
"\r\n" +
"Bonjour =C3=A0 tous"
if s := b.String(); s != expected {
t.Errorf("Expected written entity to be:\n%s\nbut got:\n%s", expected, s)
}
}
func TestEntity_WriteTo_multipart(t *testing.T) {
e := testMakeMultipart()
var b bytes.Buffer
if err := e.WriteTo(&b); err != nil {
t.Fatal("Expected no error while writing entity, got", err)
}
if s := b.String(); s != testMultipartText {
t.Errorf("Expected written entity to be:\n%s\nbut got:\n%s", testMultipartText, s)
}
}
func TestNew_unknownTransferEncoding(t *testing.T) {
var h Header
h.Set("Content-Transfer-Encoding", "i-dont-exist")
expected := "hey there"
r := strings.NewReader(expected)
e, err := New(h, r)
if err == nil {
t.Fatal("New(unknown transfer encoding): expected an error")
}
if !IsUnknownEncoding(err) {
t.Fatal("New(unknown transfer encoding): expected an error that verifies IsUnknownEncoding")
}
if !errors.As(err, &UnknownEncodingError{}) {
t.Fatal("New(unknown transfer encoding): expected an error that verifies errors.As(err, &EncodingError{})")
}
if b, err := ioutil.ReadAll(e.Body); err != nil {
t.Error("Expected no error while reading entity body, got", err)
} else if s := string(b); s != expected {
t.Errorf("Expected %q as entity body but got %q", expected, s)
}
}
func TestNew_unknownCharset(t *testing.T) {
var h Header
h.Set("Content-Type", "text/plain; charset=I-DONT-EXIST")
expected := "hey there"
r := strings.NewReader(expected)
e, err := New(h, r)
if err == nil {
t.Fatal("New(unknown charset): expected an error")
}
if !IsUnknownCharset(err) {
t.Fatal("New(unknown charset): expected an error that verifies IsUnknownCharset")
}
if b, err := ioutil.ReadAll(e.Body); err != nil {
t.Error("Expected no error while reading entity body, got", err)
} else if s := string(b); s != expected {
t.Errorf("Expected %q as entity body but got %q", expected, s)
}
}
// Checks that we are compatible both with lines longer than 72 octets and
// FWS indented lines - per RFC-2045 whitespace should be ignored.
func TestNew_paddedBase64(t *testing.T) {
testPartRaw := "Content-Type: text/plain; name=\"test.txt\"\r\n" +
"Content-Transfer-Encoding: base64\r\n" +
"Content-ID: <[email protected]>\r\n" +
"Content-Disposition: attachment; filename=\"text.txt\"\r\n" +
"\r\n" +
"TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQsIGNvbnNlY3RldHVyIGFkaXBpc2NpbmcgZWxpdCwgc2VkIGRvIGVpdXNtb2QgdGVtc\r\n" +
" G9yIGluY2lkaWR1bnQgdXQgbGFib3JlIGV0IGRvbG9yZSBtYWduYSBhbGlxdWEuIFV0IGVuaW0gYWQgbWluaW0gdmVuaWFtLCBxd\r\n" +
" WlzIG5vc3RydWQgZXhlcmNpdGF0aW9uIHVsbGFtY28gbGFib3JpcyBuaXNpIHV0IGFsaXF1aXAgZXggZWEgY29tbW9kbyBjb25zZ\r\n" +
" XF1YXQuIER1aXMgYXV0ZSBpcnVyZSBkb2xvciBpbiByZXByZWhlbmRlcml0IGluIHZvbHVwdGF0ZSB2ZWxpdCBlc3NlIGNpbGx1b\r\n" +
" SBkb2xvcmUgZXUgZnVnaWF0IG51bGxhIHBhcmlhdHVyLiBFeGNlcHRldXIgc2ludCBvY2NhZWNhdCBjdXBpZGF0YXQgbm9uIHByb\r\n" +
" 2lkZW50LCBzdW50IGluIGN1bHBhIHF1aSBvZmZpY2lhIGRlc2VydW50IG1vbGxpdCBhbmltIGlkIGVzdCBsYWJvcnVtLg=="
expected := "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed" +
" do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut e" +
"nim ad minim veniam, quis nostrud exercitation ullamco laboris nisi " +
"ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehe" +
"nderit in voluptate velit esse cillum dolore eu fugiat nulla pariatu" +
"r. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui" +
" officia deserunt mollit anim id est laborum."
e, err := Read(strings.NewReader(testPartRaw))
if err != nil {
t.Fatal("New(padded Base64): expected no error, got", err)
}
if b, err := ioutil.ReadAll(e.Body); err != nil {
t.Error("Expected no error while reading entity body, got", err)
} else if s := string(b); s != expected {
t.Errorf("Expected %q as entity body but got %q", expected, s)
}
| } | random_line_split |
|
entity_test.go | Body string
switch i {
case 0:
expectedType = "text/plain"
expectedBody = "Text part"
case 1:
expectedType = "text/html"
expectedBody = "<p>HTML part</p>"
}
if mediaType := p.Header.Get("Content-Type"); mediaType != expectedType {
t.Errorf("Expected part Content-Type to be %q, got %q", expectedType, mediaType)
}
if b, err := ioutil.ReadAll(p.Body); err != nil {
t.Error("Expected no error while reading part body, got", err)
} else if s := string(b); s != expectedBody {
t.Errorf("Expected %q as part body but got %q", expectedBody, s)
}
i++
}
if i != 2 {
t.Fatalf("Expected multipart entity to contain exactly 2 parts, got %v", i)
}
}
func TestNewMultipart(t *testing.T) {
testMultipart(t, testMakeMultipart())
}
func TestNewMultipart_read(t *testing.T) {
e := testMakeMultipart()
if b, err := ioutil.ReadAll(e.Body); err != nil {
t.Error("Expected no error while reading multipart body, got", err)
} else if s := string(b); s != testMultipartBody {
t.Errorf("Expected %q as multipart body but got %q", testMultipartBody, s)
}
}
func TestRead_multipart(t *testing.T) {
e, err := Read(strings.NewReader(testMultipartText))
if err != nil {
t.Fatal("Expected no error while reading multipart, got", err)
}
testMultipart(t, e)
}
func TestRead_single(t *testing.T) {
e, err := Read(strings.NewReader(testSingleText))
if err != nil {
t.Fatalf("Read() = %v", err)
}
b, err := ioutil.ReadAll(e.Body)
if err != nil {
t.Fatalf("ioutil.ReadAll() = %v", err)
}
expected := "Message body"
if string(b) != expected {
t.Fatalf("Expected body to be %q, got %q", expected, string(b))
}
}
func TestRead_tooBig(t *testing.T) {
raw := "Subject: " + strings.Repeat("A", 4096*1024) + "\r\n" +
"\r\n" +
"This header is too big.\r\n"
_, err := Read(strings.NewReader(raw))
if err != errHeaderTooBig {
t.Fatalf("Read() = %q, want %q", err, errHeaderTooBig)
}
}
func TestReadOptions_withDefaults(t *testing.T) {
// verify that .withDefaults() doesn't mutate original values
original := &ReadOptions{MaxHeaderBytes: -123}
modified := original.withDefaults() // should set MaxHeaderBytes to math.MaxInt64
if original.MaxHeaderBytes == modified.MaxHeaderBytes {
t.Error("Expected ReadOptions.withDefaults() to not mutate the original value")
}
}
func TestReadWithOptions(t *testing.T) {
tests := []struct {
name string
original *ReadOptions
want *ReadOptions
wantErr bool
}{
{
name: "default value",
original: &ReadOptions{},
want: &ReadOptions{MaxHeaderBytes: defaultMaxHeaderBytes},
wantErr: true,
},
{
name: "infinite header value",
original: &ReadOptions{MaxHeaderBytes: -1},
want: &ReadOptions{MaxHeaderBytes: math.MaxInt64},
wantErr: false,
},
{
name: "infinite header value any negative",
original: &ReadOptions{MaxHeaderBytes: -1234},
want: &ReadOptions{MaxHeaderBytes: math.MaxInt64},
wantErr: false,
},
{
name: "custom header value",
original: &ReadOptions{MaxHeaderBytes: 128},
want: &ReadOptions{MaxHeaderBytes: 128},
wantErr: true,
},
}
for _, test := range tests {
raw := "Subject: " + strings.Repeat("A", 4096*1024) + "\r\n" +
"\r\n" +
"This header is very big, but we should allow it via options.\r\n"
t.Run(test.name, func(t *testing.T) {
// First validate the options will be set as expected, or there is no
// point checking the ReadWithOptions func.
got := test.original.withDefaults()
if !reflect.DeepEqual(got, test.want) {
t.Fatalf("ReadOptions.withDefaults() =\n%#v\nbut want:\n%#v", got, test.want)
}
_, err := ReadWithOptions(strings.NewReader(raw), test.original)
gotErr := err != nil
if gotErr != test.wantErr {
t.Errorf("ReadWithOptions() = %t but want: %t", gotErr, test.wantErr)
}
})
}
}
func TestReadWithOptions_nilDefault(t *testing.T) {
raw := "Subject: Something\r\n"
var opts *ReadOptions // explicitly nil: ReadWithOptions should fall back to defaults
_, err := ReadWithOptions(strings.NewReader(raw), opts)
if err != nil {
t.Fatalf("ReadWithOptions() = %v", err)
}
}
func TestEntity_WriteTo_decode(t *testing.T) {
e := testMakeEntity()
e.Header.SetContentType("text/plain", map[string]string{"charset": "utf-8"})
e.Header.Del("Content-Transfer-Encoding")
var b bytes.Buffer
if err := e.WriteTo(&b); err != nil {
t.Fatal("Expected no error while writing entity, got", err)
}
expected := "Mime-Version: 1.0\r\n" +
"Content-Type: text/plain; charset=utf-8\r\n" +
"\r\n" +
"cc sava"
if s := b.String(); s != expected {
t.Errorf("Expected written entity to be:\n%s\nbut got:\n%s", expected, s)
}
}
func TestEntity_WriteTo_convert(t *testing.T) | if s := b.String(); s != expected {
t.Errorf("Expected written entity to be:\n%s\nbut got:\n%s", expected, s)
}
}
func TestEntity_WriteTo_multipart(t *testing.T) {
e := testMakeMultipart()
var b bytes.Buffer
if err := e.WriteTo(&b); err != nil {
t.Fatal("Expected no error while writing entity, got", err)
}
if s := b.String(); s != testMultipartText {
t.Errorf("Expected written entity to be:\n%s\nbut got:\n%s", testMultipartText, s)
}
}
func TestNew_unknownTransferEncoding(t *testing.T) {
var h Header
h.Set("Content-Transfer-Encoding", "i-dont-exist")
expected := "hey there"
r := strings.NewReader(expected)
e, err := New(h, r)
if err == nil {
t.Fatal("New(unknown transfer encoding): expected an error")
}
if !IsUnknownEncoding(err) {
t.Fatal("New(unknown transfer encoding): expected an error that verifies IsUnknownEncoding")
}
if !errors.As(err, &UnknownEncodingError{}) {
t.Fatal("New(unknown transfer encoding): expected an error that verifies errors.As(err, &EncodingError{})")
}
if b, err := ioutil.ReadAll(e.Body); err != nil {
t.Error("Expected no error while reading entity body, got", err)
} else if s := string(b); s != expected {
t.Errorf("Expected %q as entity body but got %q", expected, s)
}
}
func TestNew_unknownCharset(t *testing.T) {
var h Header
h.Set("Content-Type", "text/plain; charset=I-DONT-EXIST")
expected := "hey there"
r := strings.NewReader(expected)
e, err := New(h, r)
if err == nil {
t.Fatal("New(unknown charset): expected an error")
}
if !IsUnknownCharset(err) {
t.Fatal("New(unknown charset): expected an error that verifies IsUnknownCharset")
}
if b, err := ioutil.ReadAll(e.Body); err != nil {
t.Error("Expected no error while reading entity body, got", err)
} else if s := string | {
var h Header
h.Set("Content-Type", "text/plain; charset=utf-8")
h.Set("Content-Transfer-Encoding", "base64")
r := strings.NewReader("Qm9uam91ciDDoCB0b3Vz")
e, _ := New(h, r)
e.Header.Set("Content-Transfer-Encoding", "quoted-printable")
var b bytes.Buffer
if err := e.WriteTo(&b); err != nil {
t.Fatal("Expected no error while writing entity, got", err)
}
expected := "Mime-Version: 1.0\r\n" +
"Content-Transfer-Encoding: quoted-printable\r\n" +
"Content-Type: text/plain; charset=utf-8\r\n" +
"\r\n" +
"Bonjour =C3=A0 tous"
| identifier_body |
main.py | grid(domain_values, domain_values,
domain_values, indexing='ij')
n = len(domain_values)
num_slices = u.shape[0]
# We only want to plot the first, middle, and last time slices.
slices = [0, num_slices//2, num_slices-1]
titles = [f'{description}: slice {slice}' for slice in slices]
num_rows = 1
num_cols = len(slices)
fig = make_subplots(
rows=num_rows,
cols=num_cols,
specs=[
[{'is_3d': True}]*num_cols,
]*num_rows,
subplot_titles=titles,
)
for s in range(len(slices)):
u_slice = np.reshape(u[slices[s],:], (n, n, n))
fig.add_trace(
go.Isosurface(
x=x_grid.flatten(),
y=y_grid.flatten(),
z=z_grid.flatten(),
value=normalize(u_slice).flatten(),
isomin=0.4,
isomax=0.4,
surface_count=1,
colorscale="Viridis",
),
row=1,
col=s+1
)
pio.write_html(fig, filename)
def get_fft(u: np.ndarray, n: int) -> np.ndarray:
"""
Gets the fft of the data.
"""
# We get the fft of each time slice.
num_slices = u.shape[0]
ut = np.empty(u.shape, dtype=complex) # shape (20, 262144)
for s in range(num_slices):
# We reshape each slice into a 3D cube.
u_slice = np.reshape(u[s,:], (n, n, n)) # shape (64, 64, 64)
# We then take the fft of the 3D cube and add it to ut.
ut_slice = np.fft.fftshift(np.fft.fftn(u_slice)) # shape (64, 64, 64)
ut[s, :] = ut_slice.flatten()
return ut
def average_fft(ut: np.ndarray) -> np.ndarray:
"""
Gets the average fft of the data.
"""
# We average over each row of ut.
ut_average = np.average(ut, axis=0) # shape (262144,)
return ut_average
def plot_fft_isosurface(title: str, omega: np.ndarray,
ut: np.ndarray, filename: str) -> None:
"""
Plots an isosurface 3D graph in frequency domain.
"""
print(f'Plotting fft isosurface: {title}...')
(omega_x_grid, omega_y_grid, omega_z_grid) = np.meshgrid(omega, omega,
omega, indexing='ij')
fig = go.Figure()
fig.add_trace(
go.Isosurface(
x=omega_x_grid.flatten(),
y=omega_y_grid.flatten(),
z=omega_z_grid.flatten(),
value=normalize(ut).flatten(),
opacity=0.5,
isomin=0.6,
isomax=0.9,
surface_count=3,
colorscale="Viridis",
)
)
fig.update_layout(
title_text=title,
scene_xaxis_title_text='omega_x',
scene_yaxis_title_text='omega_y',
scene_zaxis_title_text='omega_z',
)
pio.write_html(fig, filename)
def get_peak_frequency(ut_average: np.ndarray,
omega: np.ndarray) -> Tuple[float, float, float]:
"""
Gets the peak frequency of the average fft.
"""
# We get the indices of the peak of the average fft.
n = len(omega)
argmax = np.argmax(np.abs(ut_average))
[index_x, index_y, index_z] = np.unravel_index(argmax, (n, n, n))
# We then use those indices to get the peak frequency.
return (omega[index_x], omega[index_y], omega[index_z])
def print_peak_frequency(omega_x: float, omega_y: float,
omega_z: float) -> None:
"""
Prints the peak frequency.
"""
print(f'The peak frequency is: ({omega_x}, {omega_y}, {omega_z})')
def get_filter(omega_x: float, omega_y: float, omega_z: float,
omega: np.ndarray) -> np.ndarray:
"""
Creates the filter used to denoise the data.
"""
# A 3D Gaussian is the product of three 1D Gaussians.
variance = 2.5
c = 1 / (2 * variance) # 0.2
filter_x = np.exp(-c*np.power(omega-omega_x, 2))
filter_y = np.exp(-c*np.power(omega-omega_y, 2))
filter_z = np.exp(-c*np.power(omega-omega_z, 2))
filter_3d = np.multiply.outer(np.multiply.outer(filter_x, filter_y),
filter_z)
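# Outer products assemble the separable 3D Gaussian mask, which peaks at 1 at the detected center frequency.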
return filter_3d
def denoise_frequency_domain(ut: np.ndarray,
filter_3d: np.ndarray) -> np.ndarray:
"""
Denoise ut by multiplying it by the filter.
"""
num_rows = ut.shape[0]
ut_denoised = np.empty(ut.shape, dtype=complex)
for row in range(num_rows):
ut_slice_cube = np.reshape(ut[row, :], filter_3d.shape)
ut_slice_cube_denoised = ut_slice_cube*filter_3d
ut_denoised[row, :] = ut_slice_cube_denoised.flatten()
return ut_denoised
def get_denoised_spatial_domain(ut_denoised: np.ndarray, n: int) -> np.ndarray:
"""
Converts denoised matrix in frequency domain into spatial domain.
"""
num_rows = ut_denoised.shape[0]
u_denoised = np.empty(ut_denoised.shape, dtype=complex)
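# Undo the fftshift applied in get_fft before inverting, so each spatial cube comes back in its original layout.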
for row in range(num_rows):
ut_slice_cube = np.reshape(ut_denoised[row, :], (n, n, n))
u_denoised_cube = np.fft.ifftn(np.fft.ifftshift(ut_slice_cube))
u_denoised[row, :] = u_denoised_cube.flatten()
return u_denoised
def plot_fft_isosurfaces(description: str, omega: np.ndarray,
ut: np.ndarray, filename: str) -> None:
"""
Plots a few slices of the FFTed data using isosurfaces.
"""
print(f'Plotting fft isosurfaces: {description}...')
(omega_x_grid, omega_y_grid, omega_z_grid) = np.meshgrid(omega, omega,
omega, indexing='ij')
n = len(omega)
num_slices = ut.shape[0]
# We only want to plot the first, middle, and last time slices.
slices = [0, num_slices//2, num_slices-1]
titles = [f'{description}: slice {slice}' for slice in slices]
num_rows = 1
num_cols = len(slices)
fig = make_subplots(
rows=num_rows,
cols=num_cols,
specs=[
[{'is_3d': True}]*num_cols,
]*num_rows,
subplot_titles=titles,
)
for s in range(len(slices)):
ut_slice = np.reshape(ut[slices[s],:], (n, n, n))
fig.add_trace(
go.Isosurface(
x=omega_x_grid.flatten(),
y=omega_y_grid.flatten(),
z=omega_z_grid.flatten(),
value=normalize(ut_slice).flatten(),
opacity=0.5,
isomin=0.6,
isomax=0.9,
surface_count=3,
colorscale="Viridis",
),
row=1,
col=s+1
)
fig.update_layout(
scene_xaxis_title_text="omega_x",
scene_yaxis_title_text="omega_y",
scene_zaxis_title_text="omega_z",
scene2_xaxis_title_text="omega_x",
scene2_yaxis_title_text="omega_y",
scene2_zaxis_title_text="omega_z",
scene3_xaxis_title_text="omega_x",
scene3_yaxis_title_text="omega_y",
scene3_zaxis_title_text="omega_z",
)
pio.write_html(fig, filename)
def get_marble_path(u_denoised: np.ndarray,
domain_values: np.ndarray) -> np.ndarray:
"""
Gets the path of the marble.
"""
n = len(domain_values)
num_rows = u_denoised.shape[0]
marble_path = np.empty((num_rows, 3))
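# Track the marble as the point of maximum absolute intensity in each denoised time slice.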
for row in range(num_rows):
| ut_slice_cube = np.reshape(u_denoised[row, :], (n, n, n))
argmax = np.argmax(np.abs(ut_slice_cube))
[index_x, index_y, index_z] = np.unravel_index(argmax, (n, n, n))
marble_path[row][0] = domain_values[index_x]
marble_path[row][1] = domain_values[index_y]
marble_path[row][2] = domain_values[index_z] | conditional_block |
|
main.py | points in
frequency domain.
"""
# Time or spatial domain discretization.
# Since we have periodic boundary conditions, the first and last points
# are the same. So we consider only the first n points in the time domain.
t_shifted = np.linspace(-domain_limit, domain_limit, n+1)[0:-1]
# Frequency domain discretization.
omega_points = np.linspace(-n/2, n/2, n+1)[0:-1]
omega_shifted = (2 * np.pi)/(2 * domain_limit) * omega_points
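# Scale the integer wavenumbers by 2*pi over the domain length (2*domain_limit) to get angular frequencies.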
half_n = n//2
omega_unshifted = np.concatenate((omega_shifted[half_n:n],
omega_shifted[0:half_n]))
assert np.max(omega_shifted) == np.max(omega_unshifted)
assert np.min(omega_shifted) == np.min(omega_unshifted)
assert omega_shifted.size == omega_unshifted.size
return (t_shifted, omega_shifted)
def normalize(my_array: np.ndarray) -> np.ndarray:
"""
Takes the absolute value of an ndarray and normalizes it. | return np.abs(my_array)/np.max(np.abs(my_array))
def plot_spatial_isosurfaces(description: str, domain_values: np.ndarray,
u: np.ndarray, filename: str) -> None:
"""
Plots a few slices of the the data using isosurfaces.
"""
print(f'Plotting spatial isosurfaces: {description}...')
(x_grid, y_grid, z_grid) = np.meshgrid(domain_values, domain_values,
domain_values, indexing='ij')
n = len(domain_values)
num_slices = u.shape[0]
# We only want to plot the first, middle, and last time slices.
slices = [0, num_slices//2, num_slices-1]
titles = [f'{description}: slice {slice}' for slice in slices]
num_rows = 1
num_cols = len(slices)
fig = make_subplots(
rows=num_rows,
cols=num_cols,
specs=[
[{'is_3d': True}]*num_cols,
]*num_rows,
subplot_titles=titles,
)
for s in range(len(slices)):
u_slice = np.reshape(u[slices[s],:], (n, n, n))
fig.add_trace(
go.Isosurface(
x=x_grid.flatten(),
y=y_grid.flatten(),
z=z_grid.flatten(),
value=normalize(u_slice).flatten(),
isomin=0.4,
isomax=0.4,
surface_count=1,
colorscale="Viridis",
),
row=1,
col=s+1
)
pio.write_html(fig, filename)
def get_fft(u: np.ndarray, n: int) -> np.ndarray:
"""
Gets the fft of the data.
"""
# We get the fft of each time slice.
num_slices = u.shape[0]
ut = np.empty(u.shape, dtype=complex) # shape (20, 262144)
for s in range(num_slices):
# We reshape each slice into a 3D cube.
u_slice = np.reshape(u[s,:], (n, n, n)) # shape (64, 64, 64)
# We then take the fft of the 3D cube and add it to ut.
ut_slice = np.fft.fftshift(np.fft.fftn(u_slice)) # shape (64, 64, 64)
ut[s, :] = ut_slice.flatten()
return ut
def average_fft(ut: np.ndarray) -> np.ndarray:
"""
Gets the average fft of the data.
"""
# We average over each row of ut.
ut_average = np.average(ut, axis=0) # shape (262144,)
return ut_average
def plot_fft_isosurface(title: str, omega: np.ndarray,
ut: np.ndarray, filename: str) -> None:
"""
Plots an isosurface 3D graph in frequency domain.
"""
print(f'Plotting fft isosurface: {title}...')
(omega_x_grid, omega_y_grid, omega_z_grid) = np.meshgrid(omega, omega,
omega, indexing='ij')
fig = go.Figure()
fig.add_trace(
go.Isosurface(
x=omega_x_grid.flatten(),
y=omega_y_grid.flatten(),
z=omega_z_grid.flatten(),
value=normalize(ut).flatten(),
opacity=0.5,
isomin=0.6,
isomax=0.9,
surface_count=3,
colorscale="Viridis",
)
)
fig.update_layout(
title_text=title,
scene_xaxis_title_text='omega_x',
scene_yaxis_title_text='omega_y',
scene_zaxis_title_text='omega_z',
)
pio.write_html(fig, filename)
def get_peak_frequency(ut_average: np.ndarray,
omega: np.ndarray) -> Tuple[float, float, float]:
"""
Gets the peak frequency of the average fft.
"""
# We get the indices of the peak of the average fft.
n = len(omega)
argmax = np.argmax(np.abs(ut_average))
[index_x, index_y, index_z] = np.unravel_index(argmax, (n, n, n))
# We then use those indices to get the peak frequency.
return (omega[index_x], omega[index_y], omega[index_z])
def print_peak_frequency(omega_x: float, omega_y: float,
omega_z: float) -> None:
"""
Prints the peak frequency.
"""
print(f'The peak frequency is: ({omega_x}, {omega_y}, {omega_z})')
def get_filter(omega_x: float, omega_y: float, omega_z: float,
omega: np.ndarray) -> np.ndarray:
"""
Creates the filter used to denoise the data.
"""
# A 3D Gaussian is the product of three 1D Gaussians.
variance = 2.5
c = 1 / (2 * variance) # 0.2
filter_x = np.exp(-c*np.power(omega-omega_x, 2))
filter_y = np.exp(-c*np.power(omega-omega_y, 2))
filter_z = np.exp(-c*np.power(omega-omega_z, 2))
filter_3d = np.multiply.outer(np.multiply.outer(filter_x, filter_y),
filter_z)
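# Outer products assemble the separable 3D Gaussian mask, which peaks at 1 at the detected center frequency.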
return filter_3d
def denoise_frequency_domain(ut: np.ndarray,
filter_3d: np.ndarray) -> np.ndarray:
"""
Denoise ut by multiplying it by the filter.
"""
num_rows = ut.shape[0]
ut_denoised = np.empty(ut.shape, dtype=complex)
for row in range(num_rows):
ut_slice_cube = np.reshape(ut[row, :], filter_3d.shape)
ut_slice_cube_denoised = ut_slice_cube*filter_3d
ut_denoised[row, :] = ut_slice_cube_denoised.flatten()
return ut_denoised
def get_denoised_spatial_domain(ut_denoised: np.ndarray, n: int) -> np.ndarray:
"""
Converts denoised matrix in frequency domain into spatial domain.
"""
num_rows = ut_denoised.shape[0]
u_denoised = np.empty(ut_denoised.shape, dtype=complex)
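# Undo the fftshift applied in get_fft before inverting, so each spatial cube comes back in its original layout.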
for row in range(num_rows):
ut_slice_cube = np.reshape(ut_denoised[row, :], (n, n, n))
u_denoised_cube = np.fft.ifftn(np.fft.ifftshift(ut_slice_cube))
u_denoised[row, :] = u_denoised_cube.flatten()
return u_denoised
def plot_fft_isosurfaces(description: str, omega: np.ndarray,
ut: np.ndarray, filename: str) -> None:
"""
Plots a few slices of the FFTed data using isosurfaces.
"""
print(f'Plotting fft isosurfaces: {description}...')
(omega_x_grid, omega_y_grid, omega_z_grid) = np.meshgrid(omega, omega,
omega, indexing='ij')
n = len(omega)
num_slices = ut.shape[0]
# We only want to plot the first, middle, and last time slices.
slices = [0, num_slices//2, num_slices-1]
titles = [f'{description}: slice {slice}' for slice in slices]
num_rows = 1
num_cols = len(slices)
fig = make_subplots(
rows=num_rows,
cols=num_cols,
specs=[
[{'is_3d': True}]*num_cols,
]*num_rows,
subplot_titles=titles,
)
for s in range(len(slices)):
ut_slice = np.reshape(ut[slices[s],:], (n, n, n))
fig.add_trace(
go.Isosurface(
x=omega_x_grid.flatten(),
y=omega_y_grid.flatten(),
z=omega_z_grid.flatten(),
value=normalize | """
| random_line_split |
main.py | points in
frequency domain.
"""
# Time or spatial domain discretization.
# Since we have periodic boundary conditions, the first and last points
# are the same. So we consider only the first n points in the time domain.
t_shifted = np.linspace(-domain_limit, domain_limit, n+1)[0:-1]
# Frequency domain discretization.
omega_points = np.linspace(-n/2, n/2, n+1)[0:-1]
omega_shifted = (2 * np.pi)/(2 * domain_limit) * omega_points
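# Scale the integer wavenumbers by 2*pi over the domain length (2*domain_limit) to get angular frequencies.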
half_n = n//2
omega_unshifted = np.concatenate((omega_shifted[half_n:n],
omega_shifted[0:half_n]))
assert np.max(omega_shifted) == np.max(omega_unshifted)
assert np.min(omega_shifted) == np.min(omega_unshifted)
assert omega_shifted.size == omega_unshifted.size
return (t_shifted, omega_shifted)
def normalize(my_array: np.ndarray) -> np.ndarray:
"""
Takes the absolute value of an ndarray and normalizes it.
"""
return np.abs(my_array)/np.max(np.abs(my_array))
def plot_spatial_isosurfaces(description: str, domain_values: np.ndarray,
u: np.ndarray, filename: str) -> None:
"""
Plots a few slices of the data using isosurfaces.
"""
print(f'Plotting spatial isosurfaces: {description}...')
(x_grid, y_grid, z_grid) = np.meshgrid(domain_values, domain_values,
domain_values, indexing='ij')
n = len(domain_values)
num_slices = u.shape[0]
# We only want to plot the first, middle, and last time slices.
slices = [0, num_slices//2, num_slices-1]
titles = [f'{description}: slice {slice}' for slice in slices]
num_rows = 1
num_cols = len(slices)
fig = make_subplots(
rows=num_rows,
cols=num_cols,
specs=[
[{'is_3d': True}]*num_cols,
]*num_rows,
subplot_titles=titles,
)
for s in range(len(slices)):
u_slice = np.reshape(u[slices[s],:], (n, n, n))
fig.add_trace(
go.Isosurface(
x=x_grid.flatten(),
y=y_grid.flatten(),
z=z_grid.flatten(),
value=normalize(u_slice).flatten(),
isomin=0.4,
isomax=0.4,
surface_count=1,
colorscale="Viridis",
),
row=1,
col=s+1
)
pio.write_html(fig, filename)
def get_fft(u: np.ndarray, n: int) -> np.ndarray:
"""
Gets the fft of the data.
"""
# We get the fft of each time slice.
num_slices = u.shape[0]
ut = np.empty(u.shape, dtype=complex) # shape (20, 262144)
for s in range(num_slices):
# We reshape each slice into a 3D cube.
u_slice = np.reshape(u[s,:], (n, n, n)) # shape (64, 64, 64)
# We then take the fft of the 3D cube and add it to ut.
ut_slice = np.fft.fftshift(np.fft.fftn(u_slice)) # shape (64, 64, 64)
ut[s, :] = ut_slice.flatten()
return ut
def average_fft(ut: np.ndarray) -> np.ndarray:
"""
Gets the average fft of the data.
"""
# We average over each row of ut.
ut_average = np.average(ut, axis=0) # shape (262144,)
return ut_average
def plot_fft_isosurface(title: str, omega: np.ndarray,
ut: np.ndarray, filename: str) -> None:
"""
Plots an isosurface 3D graph in frequency domain.
"""
print(f'Plotting fft isosurface: {title}...')
(omega_x_grid, omega_y_grid, omega_z_grid) = np.meshgrid(omega, omega,
omega, indexing='ij')
fig = go.Figure()
fig.add_trace(
go.Isosurface(
x=omega_x_grid.flatten(),
y=omega_y_grid.flatten(),
z=omega_z_grid.flatten(),
value=normalize(ut).flatten(),
opacity=0.5,
isomin=0.6,
isomax=0.9,
surface_count=3,
colorscale="Viridis",
)
)
fig.update_layout(
title_text=title,
scene_xaxis_title_text='omega_x',
scene_yaxis_title_text='omega_y',
scene_zaxis_title_text='omega_z',
)
pio.write_html(fig, filename)
def get_peak_frequency(ut_average: np.ndarray,
omega: np.ndarray) -> Tuple[float, float, float]:
"""
Gets the peak frequency of the average fft.
"""
# We get the indices of the peak of the average fft.
n = len(omega)
argmax = np.argmax(np.abs(ut_average))
[index_x, index_y, index_z] = np.unravel_index(argmax, (n, n, n))
# We then use those indices to get the peak frequency.
return (omega[index_x], omega[index_y], omega[index_z])
def | (omega_x: float, omega_y: float,
omega_z: float) -> None:
"""
Prints the peak frequency.
"""
print(f'The peak frequency is: ({omega_x}, {omega_y}, {omega_z})')
def get_filter(omega_x: float, omega_y: float, omega_z: float,
omega: np.ndarray) -> np.ndarray:
"""
Creates the filter used to denoise the data.
"""
# A 3D Gaussian is the product of three 1D Gaussians.
variance = 2.5
c = 1 / (2 * variance) # 0.2
filter_x = np.exp(-c*np.power(omega-omega_x, 2))
filter_y = np.exp(-c*np.power(omega-omega_y, 2))
filter_z = np.exp(-c*np.power(omega-omega_z, 2))
filter_3d = np.multiply.outer(np.multiply.outer(filter_x, filter_y),
filter_z)
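# Outer products assemble the separable 3D Gaussian mask, which peaks at 1 at the detected center frequency.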
return filter_3d
def denoise_frequency_domain(ut: np.ndarray,
filter_3d: np.ndarray) -> np.ndarray:
"""
Denoise ut by multiplying it by the filter.
"""
num_rows = ut.shape[0]
ut_denoised = np.empty(ut.shape, dtype=complex)
for row in range(num_rows):
ut_slice_cube = np.reshape(ut[row, :], filter_3d.shape)
ut_slice_cube_denoised = ut_slice_cube*filter_3d
ut_denoised[row, :] = ut_slice_cube_denoised.flatten()
return ut_denoised
def get_denoised_spatial_domain(ut_denoised: np.ndarray, n: int) -> np.ndarray:
"""
Converts denoised matrix in frequency domain into spatial domain.
"""
num_rows = ut_denoised.shape[0]
u_denoised = np.empty(ut_denoised.shape, dtype=complex)
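# Undo the fftshift applied in get_fft before inverting, so each spatial cube comes back in its original layout.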
for row in range(num_rows):
ut_slice_cube = np.reshape(ut_denoised[row, :], (n, n, n))
u_denoised_cube = np.fft.ifftn(np.fft.ifftshift(ut_slice_cube))
u_denoised[row, :] = u_denoised_cube.flatten()
return u_denoised
def plot_fft_isosurfaces(description: str, omega: np.ndarray,
ut: np.ndarray, filename: str) -> None:
"""
Plots a few slices of the FFTed data using isosurfaces.
"""
print(f'Plotting fft isosurfaces: {description}...')
(omega_x_grid, omega_y_grid, omega_z_grid) = np.meshgrid(omega, omega,
omega, indexing='ij')
n = len(omega)
num_slices = ut.shape[0]
# We only want to plot the first, middle, and last time slices.
slices = [0, num_slices//2, num_slices-1]
titles = [f'{description}: slice {slice}' for slice in slices]
num_rows = 1
num_cols = len(slices)
fig = make_subplots(
rows=num_rows,
cols=num_cols,
specs=[
[{'is_3d': True}]*num_cols,
]*num_rows,
subplot_titles=titles,
)
for s in range(len(slices)):
ut_slice = np.reshape(ut[slices[s],:], (n, n, n))
fig.add_trace(
go.Isosurface(
x=omega_x_grid.flatten(),
y=omega_y_grid.flatten(),
z=omega_z_grid.flatten(),
value= | print_peak_frequency | identifier_name |
main.py | for s in range(len(slices)):
u_slice = np.reshape(u[slices[s],:], (n, n, n))
fig.add_trace(
go.Isosurface(
x=x_grid.flatten(),
y=y_grid.flatten(),
z=z_grid.flatten(),
value=normalize(u_slice).flatten(),
isomin=0.4,
isomax=0.4,
surface_count=1,
colorscale="Viridis",
),
row=1,
col=s+1
)
pio.write_html(fig, filename)
def get_fft(u: np.ndarray, n: int) -> np.ndarray:
"""
Gets the fft of the data.
"""
# We get the fft of each time slice.
num_slices = u.shape[0]
ut = np.empty(u.shape, dtype=complex) # shape (20, 262144)
for s in range(num_slices):
# We reshape each slice into a 3D cube.
u_slice = np.reshape(u[s,:], (n, n, n)) # shape (64, 64, 64)
# We then take the fft of the 3D cube and add it to ut.
ut_slice = np.fft.fftshift(np.fft.fftn(u_slice)) # shape (64, 64, 64)
ut[s, :] = ut_slice.flatten()
return ut
def average_fft(ut: np.ndarray) -> np.ndarray:
"""
Gets the average fft of the data.
"""
# We average over each row of ut.
ut_average = np.average(ut, axis=0) # shape (262144,)
return ut_average
def plot_fft_isosurface(title: str, omega: np.ndarray,
ut: np.ndarray, filename: str) -> None:
"""
Plots an isosurface 3D graph in frequency domain.
"""
print(f'Plotting fft isosurface: {title}...')
(omega_x_grid, omega_y_grid, omega_z_grid) = np.meshgrid(omega, omega,
omega, indexing='ij')
fig = go.Figure()
fig.add_trace(
go.Isosurface(
x=omega_x_grid.flatten(),
y=omega_y_grid.flatten(),
z=omega_z_grid.flatten(),
value=normalize(ut).flatten(),
opacity=0.5,
isomin=0.6,
isomax=0.9,
surface_count=3,
colorscale="Viridis",
)
)
fig.update_layout(
title_text=title,
scene_xaxis_title_text='omega_x',
scene_yaxis_title_text='omega_y',
scene_zaxis_title_text='omega_z',
)
pio.write_html(fig, filename)
def get_peak_frequency(ut_average: np.ndarray,
omega: np.ndarray) -> Tuple[float, float, float]:
"""
Gets the peak frequency of the average fft.
"""
# We get the indices of the peak of the average fft.
n = len(omega)
argmax = np.argmax(np.abs(ut_average))
[index_x, index_y, index_z] = np.unravel_index(argmax, (n, n, n))
# We then use those indices to get the peak frequency.
return (omega[index_x], omega[index_y], omega[index_z])
def print_peak_frequency(omega_x: float, omega_y: float,
omega_z: float) -> None:
"""
Prints the peak frequency.
"""
print(f'The peak frequency is: ({omega_x}, {omega_y}, {omega_z})')
def get_filter(omega_x: float, omega_y: float, omega_z: float,
omega: np.ndarray) -> np.ndarray:
"""
Creates the filter used to denoise the data.
"""
# A 3D Gaussian is the product of three 1D Gaussians.
variance = 2.5
c = 1 / (2 * variance) # 0.2
filter_x = np.exp(-c*np.power(omega-omega_x, 2))
filter_y = np.exp(-c*np.power(omega-omega_y, 2))
filter_z = np.exp(-c*np.power(omega-omega_z, 2))
filter_3d = np.multiply.outer(np.multiply.outer(filter_x, filter_y),
filter_z)
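# Outer products assemble the separable 3D Gaussian mask, which peaks at 1 at the detected center frequency.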
return filter_3d
def denoise_frequency_domain(ut: np.ndarray,
filter_3d: np.ndarray) -> np.ndarray:
"""
Denoise ut by multiplying it by the filter.
"""
num_rows = ut.shape[0]
ut_denoised = np.empty(ut.shape, dtype=complex)
for row in range(num_rows):
ut_slice_cube = np.reshape(ut[row, :], filter_3d.shape)
ut_slice_cube_denoised = ut_slice_cube*filter_3d
ut_denoised[row, :] = ut_slice_cube_denoised.flatten()
return ut_denoised
def get_denoised_spatial_domain(ut_denoised: np.ndarray, n: int) -> np.ndarray:
"""
Converts denoised matrix in frequency domain into spatial domain.
"""
num_rows = ut_denoised.shape[0]
u_denoised = np.empty(ut_denoised.shape, dtype=complex)
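# Undo the fftshift applied in get_fft before inverting, so each spatial cube comes back in its original layout.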
for row in range(num_rows):
ut_slice_cube = np.reshape(ut_denoised[row, :], (n, n, n))
u_denoised_cube = np.fft.ifftn(np.fft.ifftshift(ut_slice_cube))
u_denoised[row, :] = u_denoised_cube.flatten()
return u_denoised
def plot_fft_isosurfaces(description: str, omega: np.ndarray,
ut: np.ndarray, filename: str) -> None:
"""
Plots a few slices of the FFTed data using isosurfaces.
"""
print(f'Plotting fft isosurfaces: {description}...')
(omega_x_grid, omega_y_grid, omega_z_grid) = np.meshgrid(omega, omega,
omega, indexing='ij')
n = len(omega)
num_slices = ut.shape[0]
# We only want to plot the first, middle, and last time slices.
slices = [0, num_slices//2, num_slices-1]
titles = [f'{description}: slice {slice}' for slice in slices]
num_rows = 1
num_cols = len(slices)
fig = make_subplots(
rows=num_rows,
cols=num_cols,
specs=[
[{'is_3d': True}]*num_cols,
]*num_rows,
subplot_titles=titles,
)
for s in range(len(slices)):
ut_slice = np.reshape(ut[slices[s],:], (n, n, n))
fig.add_trace(
go.Isosurface(
x=omega_x_grid.flatten(),
y=omega_y_grid.flatten(),
z=omega_z_grid.flatten(),
value=normalize(ut_slice).flatten(),
opacity=0.5,
isomin=0.6,
isomax=0.9,
surface_count=3,
colorscale="Viridis",
),
row=1,
col=s+1
)
fig.update_layout(
scene_xaxis_title_text="omega_x",
scene_yaxis_title_text="omega_y",
scene_zaxis_title_text="omega_z",
scene2_xaxis_title_text="omega_x",
scene2_yaxis_title_text="omega_y",
scene2_zaxis_title_text="omega_z",
scene3_xaxis_title_text="omega_x",
scene3_yaxis_title_text="omega_y",
scene3_zaxis_title_text="omega_z",
)
pio.write_html(fig, filename)
def get_marble_path(u_denoised: np.ndarray,
domain_values: np.ndarray) -> np.ndarray:
"""
Gets the path of the marble.
"""
n = len(domain_values)
num_rows = u_denoised.shape[0]
marble_path = np.empty((num_rows, 3))
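# Track the marble as the point of maximum absolute intensity in each denoised time slice.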
for row in range(num_rows):
ut_slice_cube = np.reshape(u_denoised[row, :], (n, n, n))
argmax = np.argmax(np.abs(ut_slice_cube))
[index_x, index_y, index_z] = np.unravel_index(argmax, (n, n, n))
marble_path[row][0] = domain_values[index_x]
marble_path[row][1] = domain_values[index_y]
marble_path[row][2] = domain_values[index_z]
return marble_path
def plot_marble_path(title: str, marble_path: np.ndarray,
filename: str) -> None:
| """
Plots the 3D path of the marble.
"""
print('Plotting marble path...')
fig = go.Figure()
fig.add_trace(
go.Scatter3d(
x=marble_path[:, 0], y=marble_path[:, 1], z=marble_path[:, 2],
mode="lines+markers",
line_color=COLOR1,
line_width=3,
)
)
fig.update_layout(
title_text=title,
)
pio.write_html(fig, filename) | identifier_body |
|
ship_parser.rs | (ApAmmo::new(
ammo["bulletDiametr"].as_f64().expect("Couldn't find bulletDiametr"),
ammo["alphaDamage"].as_f64().expect("Couldn't find alphaDamage"),
ammo["bulletDetonator"].as_f64().expect("Couldn't find bulletDetonator"),
ammo["bulletDetonatorThreshold"].as_f64().expect("Couldn't find bulletDetonatorThreshold"),
)),
ballistics,
)
} else if ammotype == "CS" {
warn!("Found unimplemented ammo type CS!");
Ammo::new(
AmmoType::He(HeAmmo::new(1.0, 1.0)), ballistics)
} else {
error!("Found unknown ammo type {}!", ammotype);
panic!()
}
}
fn parse_artillery(artillery_spec: &Map<String, Value>) -> Vec<Gun> {
//debug!("{:#?}", artillery_spec);
let guns = artillery_spec["guns"].as_object().unwrap();
/*for (key,gun) in guns {
debug!("{}: {:?}", key, gun);
}*/
let dispersion = Dispersion::new(
artillery_spec["minDistH"].as_f64().expect("Couldn't find horizontal"),
artillery_spec["minDistV"].as_f64().expect("Couldn't find vertical"),
artillery_spec["maxDist"].as_f64().expect("Couldn't find maxrange"),
artillery_spec["sigmaCount"].as_f64().expect("Couldn't find sigmaCount")
);
guns.iter().map(|(_, gun)| {
let ammo_list = gun["ammoList"].as_object().expect("Couldn't get ammoList");
//debug!("{}: {:#?}", key, gun);
let ammo: Vec<_> = ammo_list.iter().map(|(_, ammo)| {
parse_ammotype(ammo.as_object().unwrap())
}).collect();
Gun::new(
dispersion.clone(),
ammo,
)
}).collect()
}
#[derive(Deserialize)]
struct ArmorMaterial {
#[serde(alias = "type")]
armor_type: usize,
thickness: usize, // mm
}
#[derive(Deserialize)]
struct ArmorGroup {
material: String,
indices: Vec<usize>,
}
#[derive(Deserialize)]
struct ArmorObject {
vertices: Vec<f64>,
groups: Vec<ArmorGroup>,
}
#[derive(Deserialize)]
struct GeometryObject {
armor: ArmorObject,
}
#[derive(Deserialize)]
struct RawGeometry {
objects: GeometryObject,
materials: HashMap<String, ArmorMaterial>,
}
impl RawGeometry {
pub fn to_armor_faces(self, matrix: Matrix4<f64>) -> Vec<ArmorFace> {
let mut vertices = vec!();
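// Vertices come as a flat [x0, y0, z0, x1, y1, z1, ...] array; transform each point into world space.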
for i in 0..self.objects.armor.vertices.len()/3 {
let pnt = Point3::new(
self.objects.armor.vertices[i*3+0],
self.objects.armor.vertices[i*3+1],
self.objects.armor.vertices[i*3+2],
);
let pnt = Point3::from_homogeneous(matrix * pnt.to_homogeneous());
vertices.push(Point3::new(pnt.x, pnt.y, pnt.z));
}
let mut faces = vec!();
for group in self.objects.armor.groups {
let material = &self.materials[&group.material];
for i in 0..group.indices.len()/3 {
faces.push(ArmorFace::new(
[
vertices[group.indices[i*3+0]],
vertices[group.indices[i*3+1]],
vertices[group.indices[i*3+2]],
],
material.thickness as f64,
ArmorType::from_id(material.armor_type),
));
}
}
faces
}
}
fn parse_armor(url: &str, hull_components: &Map<String, Value>) -> Vec<ArmorFace> {
let mut params = Map::new();
for (k,v) in hull_components {
debug!("Hull has component {}: {}", k, v);
params.insert(k.to_string(), v[0].clone());
}
let page = download_with_params(&url, "armor", &Value::Object(params).to_string());
let scheme: Vec<_> = page.lines().filter(|line| {
line.contains("var scheme")
}).collect();
if scheme.len() != 1 {
error!("Expected to find exactly one scheme variable! Found {}", scheme.len());
panic!();
}
let armor = scheme[0].split("=").skip(1).collect::<Vec<_>>().join("=");
let armor: Value = serde_json::from_str(&armor[1..armor.len()-1]).unwrap();
let mut faces = vec!();
for (_,v) in armor.as_object().unwrap() {
let url = format!("https://gamemodels3d.com/games/worldofwarships/data/current/armor/{}", v["model"].as_str().unwrap());
let model = download(&url);
if model.len() == 0 {
// Sometimes we get 404 for some reason
continue;
}
let mut m = [0.0; 16];
let transform = v["transform"].as_array().unwrap();
for i in 0..4 {
let col = transform[i].as_array().unwrap();
for j in 0..4 {
m[i*4 + j] = col[j].as_f64().expect(&format!("Couldn't get {}th element of column {}", j, i));
}
}
let m = Matrix4::new(
m[0*4 + 0],
m[0*4 + 1],
m[0*4 + 2],
m[0*4 + 3],
m[1*4 + 0],
m[1*4 + 1],
m[1*4 + 2],
m[1*4 + 3],
m[2*4 + 0],
m[2*4 + 1],
m[2*4 + 2],
m[2*4 + 3],
m[3*4 + 0],
m[3*4 + 1],
m[3*4 + 2],
m[3*4 + 3],
);
//debug!("Got matrix: {:?}", m);
let geometry: RawGeometry = serde_json::from_str(&model).unwrap();
faces.append(&mut geometry.to_armor_faces(m));
}
debug!("Mesh has {} faces", faces.len());
// Get the bounding box
let mins = [
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.x}).fold(1./0., f64::min),
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.y}).fold(1./0., f64::min),
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.z}).fold(1./0., f64::min),
];
let maxs = [
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.x}).fold(-1./0., f64::max),
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.y}).fold(-1./0., f64::max),
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.z}).fold(-1./0., f64::max),
];
debug!("Bounding box: {:?} to {:?}", mins, maxs);
// Dump the mesh as a .obj to debug
{
let mut f = std::fs::File::create("test.obj").unwrap();
for face in faces.iter() {
for v in face.vertices.iter() {
f.write_all(format!("v {} {} {}\n", v.x, v.y, v.z).as_bytes()).unwrap();
}
}
for i in 0..faces.len() {
f.write_all(format!("f {} {} {}\n", i*3+1, i*3+2, i*3+3).as_bytes()).unwrap();
}
}
faces
}
fn | (faces: &Vec<ArmorFace>) -> [f64; 3] {
let mins = [
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.x}).fold(1./0., f64::min),
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.y}).fold(1./0., f64::min),
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.z}).fold(1./0., f64::min),
];
let maxs = [
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.x}).fold(-1./0., f64::max),
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.y}).fold(-1./0., f64::max),
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.z}).fold(-1 | find_size | identifier_name |
ship_parser.rs | Ap(ApAmmo::new(
ammo["bulletDiametr"].as_f64().expect("Couldn't find bulletDiametr"),
ammo["alphaDamage"].as_f64().expect("Couldn't find alphaDamage"),
ammo["bulletDetonator"].as_f64().expect("Couldn't find bulletDetonator"),
ammo["bulletDetonatorThreshold"].as_f64().expect("Couldn't find bulletDetonatorThreshold"),
)),
ballistics,
)
} else if ammotype == "CS" {
warn!("Found unimplemented ammo type CS!");
Ammo::new(
AmmoType::He(HeAmmo::new(1.0, 1.0)), ballistics)
} else {
error!("Found unknown ammo type {}!", ammotype);
panic!()
}
}
fn parse_artillery(artillery_spec: &Map<String, Value>) -> Vec<Gun> {
//debug!("{:#?}", artillery_spec);
let guns = artillery_spec["guns"].as_object().unwrap();
/*for (key,gun) in guns {
debug!("{}: {:?}", key, gun);
}*/
let dispersion = Dispersion::new(
artillery_spec["minDistH"].as_f64().expect("Couldn't find horizontal"),
artillery_spec["minDistV"].as_f64().expect("Couldn't find vertical"),
artillery_spec["maxDist"].as_f64().expect("Couldn't find maxrange"),
artillery_spec["sigmaCount"].as_f64().expect("Couldn't find sigmaCount")
);
guns.iter().map(|(_, gun)| {
let ammo_list = gun["ammoList"].as_object().expect("Couldn't get ammoList");
//debug!("{}: {:#?}", key, gun);
let ammo: Vec<_> = ammo_list.iter().map(|(_, ammo)| {
parse_ammotype(ammo.as_object().unwrap())
}).collect();
Gun::new(
dispersion.clone(),
ammo,
)
}).collect()
}
#[derive(Deserialize)]
struct ArmorMaterial {
#[serde(alias = "type")]
armor_type: usize,
thickness: usize, // mm
}
#[derive(Deserialize)]
struct ArmorGroup {
material: String,
indices: Vec<usize>,
}
#[derive(Deserialize)]
struct ArmorObject {
vertices: Vec<f64>,
groups: Vec<ArmorGroup>,
}
#[derive(Deserialize)]
struct GeometryObject {
armor: ArmorObject,
}
#[derive(Deserialize)]
struct RawGeometry {
objects: GeometryObject,
materials: HashMap<String, ArmorMaterial>,
}
impl RawGeometry {
pub fn to_armor_faces(self, matrix: Matrix4<f64>) -> Vec<ArmorFace> {
let mut vertices = vec!();
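// Vertices come as a flat [x0, y0, z0, x1, y1, z1, ...] array; transform each point into world space.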
for i in 0..self.objects.armor.vertices.len()/3 {
let pnt = Point3::new(
self.objects.armor.vertices[i*3+0],
self.objects.armor.vertices[i*3+1],
self.objects.armor.vertices[i*3+2],
);
let pnt = Point3::from_homogeneous(matrix * pnt.to_homogeneous());
vertices.push(Point3::new(pnt.x, pnt.y, pnt.z));
}
let mut faces = vec!();
for group in self.objects.armor.groups {
let material = &self.materials[&group.material];
for i in 0..group.indices.len()/3 {
faces.push(ArmorFace::new(
[
vertices[group.indices[i*3+0]],
vertices[group.indices[i*3+1]],
vertices[group.indices[i*3+2]],
],
material.thickness as f64,
ArmorType::from_id(material.armor_type),
));
}
}
faces
}
}
fn parse_armor(url: &str, hull_components: &Map<String, Value>) -> Vec<ArmorFace> {
let mut params = Map::new();
for (k,v) in hull_components {
debug!("Hull has component {}: {}", k, v);
params.insert(k.to_string(), v[0].clone());
}
let page = download_with_params(&url, "armor", &Value::Object(params).to_string());
let scheme: Vec<_> = page.lines().filter(|line| {
line.contains("var scheme")
}).collect();
if scheme.len() != 1 {
error!("Expected to find exactly one scheme variable! Found {}", scheme.len());
panic!();
}
let armor = scheme[0].split("=").skip(1).collect::<Vec<_>>().join("=");
let armor: Value = serde_json::from_str(&armor[1..armor.len()-1]).unwrap();
let mut faces = vec!();
for (_,v) in armor.as_object().unwrap() {
let url = format!("https://gamemodels3d.com/games/worldofwarships/data/current/armor/{}", v["model"].as_str().unwrap());
let model = download(&url);
if model.len() == 0 {
// Sometimes we get 404 for some reason
continue;
}
let mut m = [0.0; 16];
let transform = v["transform"].as_array().unwrap();
for i in 0..4 {
let col = transform[i].as_array().unwrap();
for j in 0..4 {
m[i*4 + j] = col[j].as_f64().expect(&format!("Couldn't get {}th element of column {}", j, i));
}
}
let m = Matrix4::new(
m[0*4 + 0],
m[0*4 + 1],
m[0*4 + 2],
m[0*4 + 3],
m[1*4 + 0],
m[1*4 + 1],
m[1*4 + 2],
m[1*4 + 3],
m[2*4 + 0],
m[2*4 + 1],
m[2*4 + 2],
m[2*4 + 3],
m[3*4 + 0],
m[3*4 + 1],
m[3*4 + 2],
m[3*4 + 3],
);
//debug!("Got matrix: {:?}", m);
let geometry: RawGeometry = serde_json::from_str(&model).unwrap();
faces.append(&mut geometry.to_armor_faces(m));
}
debug!("Mesh has {} faces", faces.len());
// Get the bounding box
let mins = [
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.x}).fold(1./0., f64::min),
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.y}).fold(1./0., f64::min),
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.z}).fold(1./0., f64::min),
];
let maxs = [
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.x}).fold(-1./0., f64::max),
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.y}).fold(-1./0., f64::max),
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.z}).fold(-1./0., f64::max),
];
debug!("Bounding box: {:?} to {:?}", mins, maxs);
// Dump the mesh as a .obj to debug
{
let mut f = std::fs::File::create("test.obj").unwrap();
for face in faces.iter() {
for v in face.vertices.iter() {
f.write_all(format!("v {} {} {}\n", v.x, v.y, v.z).as_bytes()).unwrap();
}
}
for i in 0..faces.len() {
f.write_all(format!("f {} {} {}\n", i*3+1, i*3+2, i*3+3).as_bytes()).unwrap(); | }
faces
}
fn find_size(faces: &Vec<ArmorFace>) -> [f64; 3] {
let mins = [
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.x}).fold(1./0., f64::min),
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.y}).fold(1./0., f64::min),
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.z}).fold(1./0., f64::min),
];
let maxs = [
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.x}).fold(-1./0., f64::max),
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.y}).fold(-1./0., f64::max),
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.z}).fold(-1 | } | random_line_split |
ship_parser.rs | else if ammotype == "AP" {
Ammo::new(
AmmoType::Ap(ApAmmo::new(
ammo["bulletDiametr"].as_f64().expect("Couldn't find bulletDiametr"),
ammo["alphaDamage"].as_f64().expect("Couldn't find alphaDamage"),
ammo["bulletDetonator"].as_f64().expect("Couldn't find bulletDetonator"),
ammo["bulletDetonatorThreshold"].as_f64().expect("Couldn't find bulletDetonatorThreshold"),
)),
ballistics,
)
} else if ammotype == "CS" {
warn!("Found unimplemented ammo type CS!");
Ammo::new(
AmmoType::He(HeAmmo::new(1.0, 1.0)), ballistics)
} else {
error!("Found unknown ammo type {}!", ammotype);
panic!()
}
}
fn parse_artillery(artillery_spec: &Map<String, Value>) -> Vec<Gun> {
//debug!("{:#?}", artillery_spec);
let guns = artillery_spec["guns"].as_object().unwrap();
/*for (key,gun) in guns {
debug!("{}: {:?}", key, gun);
}*/
let dispersion = Dispersion::new(
artillery_spec["minDistH"].as_f64().expect("Couldn't find horizontal"),
artillery_spec["minDistV"].as_f64().expect("Couldn't find vertical"),
artillery_spec["maxDist"].as_f64().expect("Couldn't find maxrange"),
artillery_spec["sigmaCount"].as_f64().expect("Couldn't find sigmaCount")
);
guns.iter().map(|(_, gun)| {
let ammo_list = gun["ammoList"].as_object().expect("Couldn't get ammoList");
//debug!("{}: {:#?}", key, gun);
let ammo: Vec<_> = ammo_list.iter().map(|(_, ammo)| {
parse_ammotype(ammo.as_object().unwrap())
}).collect();
Gun::new(
dispersion.clone(),
ammo,
)
}).collect()
}
#[derive(Deserialize)]
struct ArmorMaterial {
#[serde(alias = "type")]
armor_type: usize,
thickness: usize, // mm
}
#[derive(Deserialize)]
struct ArmorGroup {
material: String,
indices: Vec<usize>,
}
#[derive(Deserialize)]
struct ArmorObject {
vertices: Vec<f64>,
groups: Vec<ArmorGroup>,
}
#[derive(Deserialize)]
struct GeometryObject {
armor: ArmorObject,
}
#[derive(Deserialize)]
struct RawGeometry {
objects: GeometryObject,
materials: HashMap<String, ArmorMaterial>,
}
impl RawGeometry {
pub fn to_armor_faces(self, matrix: Matrix4<f64>) -> Vec<ArmorFace> {
let mut vertices = vec!();
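// Vertices come as a flat [x0, y0, z0, x1, y1, z1, ...] array; transform each point into world space.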
for i in 0..self.objects.armor.vertices.len()/3 {
let pnt = Point3::new(
self.objects.armor.vertices[i*3+0],
self.objects.armor.vertices[i*3+1],
self.objects.armor.vertices[i*3+2],
);
let pnt = Point3::from_homogeneous(matrix * pnt.to_homogeneous());
vertices.push(Point3::new(pnt.x, pnt.y, pnt.z));
}
let mut faces = vec!();
for group in self.objects.armor.groups {
let material = &self.materials[&group.material];
for i in 0..group.indices.len()/3 {
faces.push(ArmorFace::new(
[
vertices[group.indices[i*3+0]],
vertices[group.indices[i*3+1]],
vertices[group.indices[i*3+2]],
],
material.thickness as f64,
ArmorType::from_id(material.armor_type),
));
}
}
faces
}
}
fn parse_armor(url: &str, hull_components: &Map<String, Value>) -> Vec<ArmorFace> {
let mut params = Map::new();
for (k,v) in hull_components {
debug!("Hull has component {}: {}", k, v);
params.insert(k.to_string(), v[0].clone());
}
let page = download_with_params(&url, "armor", &Value::Object(params).to_string());
let scheme: Vec<_> = page.lines().filter(|line| {
line.contains("var scheme")
}).collect();
if scheme.len() != 1 {
error!("Expected to find exactly one scheme variable! Found {}", scheme.len());
panic!();
}
let armor = scheme[0].split("=").skip(1).collect::<Vec<_>>().join("=");
let armor: Value = serde_json::from_str(&armor[1..armor.len()-1]).unwrap();
let mut faces = vec!();
for (_,v) in armor.as_object().unwrap() {
let url = format!("https://gamemodels3d.com/games/worldofwarships/data/current/armor/{}", v["model"].as_str().unwrap());
let model = download(&url);
if model.len() == 0 {
// Sometimes we get 404 for some reason
continue;
}
let mut m = [0.0; 16];
let transform = v["transform"].as_array().unwrap();
for i in 0..4 {
let col = transform[i].as_array().unwrap();
for j in 0..4 {
m[i*4 + j] = col[j].as_f64().expect(&format!("Couldn't get {}th element of column {}", j, i));
}
}
let m = Matrix4::new(
m[0*4 + 0],
m[0*4 + 1],
m[0*4 + 2],
m[0*4 + 3],
m[1*4 + 0],
m[1*4 + 1],
m[1*4 + 2],
m[1*4 + 3],
m[2*4 + 0],
m[2*4 + 1],
m[2*4 + 2],
m[2*4 + 3],
m[3*4 + 0],
m[3*4 + 1],
m[3*4 + 2],
m[3*4 + 3],
);
//debug!("Got matrix: {:?}", m);
let geometry: RawGeometry = serde_json::from_str(&model).unwrap();
faces.append(&mut geometry.to_armor_faces(m));
}
debug!("Mesh has {} faces", faces.len());
// Get the bounding box
let mins = [
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.x}).fold(1./0., f64::min),
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.y}).fold(1./0., f64::min),
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.z}).fold(1./0., f64::min),
];
let maxs = [
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.x}).fold(-1./0., f64::max),
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.y}).fold(-1./0., f64::max),
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.z}).fold(-1./0., f64::max),
];
debug!("Bounding box: {:?} to {:?}", mins, maxs);
// Dump the mesh as a .obj to debug
{
let mut f = std::fs::File::create("test.obj").unwrap();
for face in faces.iter() {
for v in face.vertices.iter() {
f.write_all(format!("v {} {} {}\n", v.x, v.y, v.z).as_bytes()).unwrap();
}
}
for i in 0..faces.len() {
f.write_all(format!("f {} {} {}\n", i*3+1, i*3+2, i*3+3).as_bytes()).unwrap();
}
}
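// The .obj dump above writes three fresh vertices per face, so face i simply
// references the 1-based vertex indices 3i+1, 3i+2 and 3i+3. No vertices are
// shared between faces; the file is larger than strictly necessary, but the
// indexing cannot go wrong, which is all a debug dump needs.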
faces
}
fn find_size(faces: &Vec<ArmorFace>) -> [f64; 3] {
let mins = [
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.x}).fold(1./0., f64::min),
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.y}).fold(1./0., f64::min),
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.z}).fold(1./0., f64::min),
];
let maxs = [
faces.iter().map(|face| | {
Ammo::new(
AmmoType::He(HeAmmo::new(
ammo["alphaDamage"].as_f64().expect("Couldn't find alphaDamage"),
ammo["alphaPiercingHE"].as_f64().expect("Couldn't find alphaPiercingHE"),
)),
ballistics,
)
} | conditional_block |
|
ship_parser.rs | (ApAmmo::new(
ammo["bulletDiametr"].as_f64().expect("Couldn't find bulletDiametr"),
ammo["alphaDamage"].as_f64().expect("Couldn't find alphaDamage"),
ammo["bulletDetonator"].as_f64().expect("Couldn't find bulletDetonator"),
ammo["bulletDetonatorThreshold"].as_f64().expect("Couldn't find bulletDetonatorThreshold"),
)),
ballistics,
)
} else if ammotype == "CS" {
warn!("Found unimplemented ammo type CS!");
Ammo::new(
AmmoType::He(HeAmmo::new(1.0, 1.0)), ballistics)
} else {
error!("Found unknown ammo type {}!", ammotype);
panic!()
}
}
fn parse_artillery(artillery_spec: &Map<String, Value>) -> Vec<Gun> | ammo,
)
}).collect()
}
#[derive(Deserialize)]
struct ArmorMaterial {
#[serde(alias = "type")]
armor_type: usize,
thickness: usize, // mm
}
#[derive(Deserialize)]
struct ArmorGroup {
material: String,
indices: Vec<usize>,
}
#[derive(Deserialize)]
struct ArmorObject {
vertices: Vec<f64>,
groups: Vec<ArmorGroup>,
}
#[derive(Deserialize)]
struct GeometryObject {
armor: ArmorObject,
}
#[derive(Deserialize)]
struct RawGeometry {
objects: GeometryObject,
materials: HashMap<String, ArmorMaterial>,
}
impl RawGeometry {
pub fn to_armor_faces(self, matrix: Matrix4<f64>) -> Vec<ArmorFace> {
let mut vertices = vec!();
for i in 0..self.objects.armor.vertices.len()/3 {
let pnt = Point3::new(
self.objects.armor.vertices[i*3+0],
self.objects.armor.vertices[i*3+1],
self.objects.armor.vertices[i*3+2],
);
let pnt = Point3::from_homogeneous(matrix * pnt.to_homogeneous());
vertices.push(Point3::new(pnt.x, pnt.y, pnt.z));
}
let mut faces = vec!();
for group in self.objects.armor.groups {
let material = &self.materials[&group.material];
for i in 0..group.indices.len()/3 {
faces.push(ArmorFace::new(
[
vertices[group.indices[i*3+0]],
vertices[group.indices[i*3+1]],
vertices[group.indices[i*3+2]],
],
material.thickness as f64,
ArmorType::from_id(material.armor_type),
));
}
}
faces
}
}
fn parse_armor(url: &str, hull_components: &Map<String, Value>) -> Vec<ArmorFace> {
let mut params = Map::new();
for (k,v) in hull_components {
debug!("Hull has component {}: {}", k, v);
params.insert(k.to_string(), v[0].clone());
}
let page = download_with_params(&url, "armor", &Value::Object(params).to_string());
let scheme: Vec<_> = page.lines().filter(|line| {
line.contains("var scheme")
}).collect();
if scheme.len() != 1 {
error!("Expected to find exactly one scheme variable! Found {}", scheme.len());
panic!();
}
let armor = scheme[0].split("=").skip(1).collect::<Vec<_>>().join("=");
let armor: Value = serde_json::from_str(&armor[1..armor.len()-1]).unwrap();
let mut faces = vec!();
for (_,v) in armor.as_object().unwrap() {
let url = format!("https://gamemodels3d.com/games/worldofwarships/data/current/armor/{}", v["model"].as_str().unwrap());
let model = download(&url);
if model.len() == 0 {
// Sometimes we get 404 for some reason
continue;
}
let mut m = [0.0; 16];
let transform = v["transform"].as_array().unwrap();
for i in 0..4 {
let col = transform[i].as_array().unwrap();
for j in 0..4 {
m[i*4 + j] = col[j].as_f64().expect(&format!("Couldn't get {}th element of column {}", j, i));
}
}
let m = Matrix4::new(
m[0*4 + 0],
m[0*4 + 1],
m[0*4 + 2],
m[0*4 + 3],
m[1*4 + 0],
m[1*4 + 1],
m[1*4 + 2],
m[1*4 + 3],
m[2*4 + 0],
m[2*4 + 1],
m[2*4 + 2],
m[2*4 + 3],
m[3*4 + 0],
m[3*4 + 1],
m[3*4 + 2],
m[3*4 + 3],
);
//debug!("Got matrix: {:?}", m);
let geometry: RawGeometry = serde_json::from_str(&model).unwrap();
faces.append(&mut geometry.to_armor_faces(m));
}
debug!("Mesh has {} faces", faces.len());
// Get the bounding box
let mins = [
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.x}).fold(1./0., f64::min),
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.y}).fold(1./0., f64::min),
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.z}).fold(1./0., f64::min),
];
let maxs = [
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.x}).fold(-1./0., f64::max),
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.y}).fold(-1./0., f64::max),
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.z}).fold(-1./0., f64::max),
];
debug!("Bounding box: {:?} to {:?}", mins, maxs);
// Dump the mesh as a .obj to debug
{
let mut f = std::fs::File::create("test.obj").unwrap();
for face in faces.iter() {
for v in face.vertices.iter() {
f.write_all(format!("v {} {} {}\n", v.x, v.y, v.z).as_bytes()).unwrap();
}
}
for i in 0..faces.len() {
f.write_all(format!("f {} {} {}\n", i*3+1, i*3+2, i*3+3).as_bytes()).unwrap();
}
}
faces
}
fn find_size(faces: &Vec<ArmorFace>) -> [f64; 3] {
let mins = [
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.x}).fold(1./0., f64::min),
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.y}).fold(1./0., f64::min),
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.z}).fold(1./0., f64::min),
];
let maxs = [
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.x}).fold(-1./0., f64::max),
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.y}).fold(-1./0., f64::max),
faces.iter().map(|face| { face.vertices.iter() }).flatten().map(|p| {p.z}).fold(-1 | {
//debug!("{:#?}", artillery_spec);
let guns = artillery_spec["guns"].as_object().unwrap();
/*for (key,gun) in guns {
debug!("{}: {:?}", key, gun);
}*/
let dispersion = Dispersion::new(
artillery_spec["minDistH"].as_f64().expect("Couldn't find horizontal"),
artillery_spec["minDistV"].as_f64().expect("Couldn't find vertical"),
artillery_spec["maxDist"].as_f64().expect("Couldn't find maxrange"),
artillery_spec["sigmaCount"].as_f64().expect("Couldn't find sigmaCount")
);
guns.iter().map(|(_, gun)| {
let ammo_list = gun["ammoList"].as_object().expect("Couldn't get ammoList");
//debug!("{}: {:#?}", key, gun);
let ammo: Vec<_> = ammo_list.iter().map(|(_, ammo)| {
parse_ammotype(ammo.as_object().unwrap())
}).collect();
Gun::new(
dispersion.clone(), | identifier_body |
main.py | i.isnumeric() and previous_character.isnumeric():
newword += i
else:
groups.append(newword)
newword = i
previous_character = i
if x == len(string) - 2:
groups.append(newword)
newword = ''
buf=[]
next_idx=0
for i in range(len(groups)):
if i<len(groups)-1:
if next_idx==i and groups[i+1]!=".":
buf.append(groups[i])
next_idx=next_idx+1
if next_idx==i and groups[i+1]==".":
b_str=groups[i]+groups[i+1]+groups[i+2]
buf.append(b_str)
next_idx=i+3
if i==len(groups)-1:
if groups[i-1]!=".":
buf.append(groups[i])
return buf
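# Illustrative example (not part of the original source): the tokenizer splits
# a formula string into alternating element symbols and coefficient strings,
# re-attaching decimal points to their numbers in the second pass, e.g.
#   seperate_string_number("Ba0.2La1.8Cu1O4")
#   -> ['Ba', '0.2', 'La', '1.8', 'Cu', '1', 'O', '4']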
pte_adapted="PTE_adapted.txt"
model_json="network.json"
model_h5="network.h5"
min_temp=0
max_temp=999
defalut_result_name="/results.txt"
result_path="results.txt"
source_path="source.txt"
target_size=10  # number of element positions per formula
num_elements=17  # per-element features: 1 coefficient + 16 parameters
class SupercoNN_class(QDialog):
error_flag=False
def __init__(self):
super(SupercoNN_class, self).__init__()
loadUi('GUI_window.ui', self)
#init text fields
self.setWindowTitle('SupercoNN')
self.Source_file_lineEdit.setText(source_path)
self.Result_path_lineEdit.setText(result_path)
self.Min_temp_lineEdit.setText(str(min_temp))
self.Max_temp_lineEdit.setText(str(max_temp))
#init signals and slots
self.Set_result_path.clicked.connect(self.set_result_directory_path)
self.Set_source_file.clicked.connect(self.set_source_file_path)
self.Start_btn.clicked.connect(self.Start_function)
@pyqtSlot()
def set_result_directory_path(self):
result_path = str(QFileDialog.getExistingDirectory(self, "Select Directory for Result File"))
if result_path!="":
result_path=result_path+defalut_result_name
self.Result_path_lineEdit.setText(result_path)
def set_source_file_path(self):
|
def Start_function(self):
#init variables
elements=[]
formulas=[]
fromulas_processed=[]
nums_custom_formulas=0
#----------------------------
self.log_listWidget.addItem("--INFO-- Files checking")
#set temperatures
try:
min_temp=float(self.Min_temp_lineEdit.text())
if min_temp<0:
self.error_flag=True
self.log_listWidget.addItem("--ERROR-- Minimum temperature can not be smaller than 0")
except ValueError:
self.error_flag=True
self.log_listWidget.addItem("--ERROR-- Minimum temperanure must be number")
try:
max_temp=float(self.Max_temp_lineEdit.text())
except ValueError:
self.error_flag=True
self.log_listWidget.addItem("--ERROR-- Maximum temperanure must be number")
#------------------------------------------------
#read chemistry table file
try:
csv_chemestry_table=open(pte_adapted, newline='')
reader_chemestry_table = csv.reader(csv_chemestry_table, delimiter=',', quotechar='|')
#reading chemistry table
next(reader_chemestry_table)#drop headers
for i in range(86):
read_row = next(reader_chemestry_table)
for j in range(len(read_row)):
read_row[j]=float(read_row[j])
elements.append(read_row)
csv_chemestry_table.close()
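# The loop above reads exactly 86 element rows (hydrogen through radon), which
# matches the 86 symbols listed in element_names below when formulas are parsed.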
except FileNotFoundError:
self.error_flag=True
self.log_listWidget.addItem("--ERROR-- Progam can not find file with chemestry table.")
self.log_listWidget.addItem("--ERROR-- File PTE_adapted.txt must be in defalut folder.")
self.log_listWidget.addItem("--ERROR-- Also you can download it from git:https://github.com/devdimit93/SupercoNN ")
#---------------------------------------------
#prepare result file
try:
result_path=str(self.Result_path_lineEdit.text())
result_cs_writer=open(result_path, 'a', newline='')
result_writer = csv.writer(result_cs_writer, delimiter=',', quotechar='|', quoting=csv.QUOTE_MINIMAL)
except FileNotFoundError:
self.error_flag=True
self.log_listWidget.addItem("--ERROR-- Programm can not open path for result file.")
self.log_listWidget.addItem("--ERROR-- Check path to result file")
#--------------------------------------------------
#reading and processing of source file
try:
source_path=str(self.Source_file_lineEdit.text())
csv_custom=open(source_path, newline='')
reader_custom = csv.reader(csv_custom, delimiter=',', quotechar='|')
element_names=["H","He","Li","Be","B","C","N", "O","F", "Ne",
"Na","Mg","Al","Si","P","S", "Cl","Ar",
"K","Ca","Sc","Ti","V","Cr","Mn","Fe", "Co","Ni", "Cu","Zn","Ga","Ge","As","Se","Br","Kr",
"Rb","Sr", "Y","Zr","Nb","Mo","Tc","Ru","Rh","Pd", "Ag","Cd", "In","Sn","Sb","Te","I","Xe",
"Cs","Ba", "La",
"Ce", "Pr","Nd","Pm", "Sm","Eu","Gd","Tb","Dy","Ho","Er","Tm", "Yb","Lu",
"Hf","Ta", "W","Re","Os","Ir","Pt","Au","Hg","Ti", "Pb","Bi", "Po","At","Rn"
]
rows=[]
read_it=True
while(read_it==True):
try:
read_row = next(reader_custom)
rows.append(read_row)
nums_custom_formulas=nums_custom_formulas+1
except StopIteration:
read_it=False
processed_rows=[]
for i in range(len(rows)):
buf=seperate_string_number(rows[i][0])
processed_rows.append(buf)
for i in range(len(processed_rows)):#formulas
elem_buf=[]
order=[]
for j in range(len(processed_rows[i])):#elements
for k in range(len(element_names)):#parametrs
if processed_rows[i][j]==element_names[k]:
order.append(k)
buf=[]
if j!=len(processed_rows[i])-1:
buf.append(float(processed_rows[i][j+1]))#coefficient
else:
buf.append(float(1))#
for f in range(len(elements[85])):
buf.append(float(elements[k][f]))
buf=np.asarray(buf)
elem_buf.append(buf)
#sort by atomic number
sorted_elem_buf=[]
for i in range(len(order)):
min_index = order.index(min(order))
sorted_elem_buf.append(elem_buf[min_index])
order[min_index]=999
sorted_elem_buf=np.asarray(sorted_elem_buf)
formulas.append(sorted_elem_buf)#elem_buf is transformed formula
formulas=np.asarray(formulas)
#formulas processing, stage 2
#expand each formula to size 10*17, the size of the neural network input:
#10 elements per formula, every element has 16 parameters and 1 coefficient
add_arr=[]  # here will be an abstract padding element; all of its parameters are zero
for i in range(num_elements):
add_arr.append(0)
add_arr=np.asarray(add_arr)
for i in range(formulas.shape[0]):
dist=target_size-formulas[i].shape[0]
buf1=[]
if dist>0:
for j in range(dist):
buf1.append(add_arr)
for j in range(formulas[i].shape[0]):
buf1.append(formulas[i][j])
buf1=np.asarray(buf1)
fromulas_processed.append(buf1)
fromulas_processed=np.asarray(fromulas_processed)
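# Review note: padding only happens in the dist > 0 branch. If the two append
# loops are indented under that branch (this flattened listing loses
# indentation), a formula with 10 or more elements would be silently dropped
# rather than truncated; worth verifying against the original file.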
csv_custom.close()
except FileNotFoundError:
self.error_flag=True
self.log_listWidget.addItem("--ERROR-- Programm can not find source file.")
self.log_listWidget.addItem("--ERROR-- Check path to source file.")
#--------------------------------------------------------
#prepare model. reading model files
try:
self.log_listWidget.addItem("--INFO-- Reading model files. Please wait some time")
json_file = open(model_json, "r")
loaded_model_json = json_file.read()
json_file.close()
model = model_from_json(loaded_model_json)
except FileNotFoundError:
self.error_flag=True
self.log_listWidget.addItem("--ERROR-- Progam can not find json file of the pretrained model ")
self.log_listWidget.addItem("--ERROR-- You can download this file from git:https://github.com/devdimit93/SupercoNN/pretrained_nn")
self.log_listWidget.addItem("--ERROR-- Rename downloaded file into network.json ")
try:
model.load_weights(model_h5)
except OSError:
self.error_flag=True
self.log_list | source_path = QFileDialog.getOpenFileName(self, "Select Source File", "", "Text Files (*.txt *.csv)")
if source_path[0]!="":
source_path=source_path[0]
self.Source_file_lineEdit.setText(source_path) | identifier_body |
main.py | i.isnumeric() and previous_character.isnumeric():
newword += i
else:
groups.append(newword)
newword = i
previous_character = i
if x == len(string) - 2:
groups.append(newword)
newword = ''
buf=[]
next_idx=0
for i in range(len(groups)):
if i<len(groups)-1:
if next_idx==i and groups[i+1]!=".":
buf.append(groups[i])
next_idx=next_idx+1
if next_idx==i and groups[i+1]==".":
b_str=groups[i]+groups[i+1]+groups[i+2]
buf.append(b_str)
next_idx=i+3
if i==len(groups)-1:
if groups[i-1]!=".":
buf.append(groups[i])
return buf
pte_adapted="PTE_adapted.txt"
model_json="network.json"
model_h5="network.h5"
min_temp=0
max_temp=999
defalut_result_name="/results.txt"
result_path="results.txt"
source_path="source.txt"
target_size=10  # number of element positions per formula
num_elements=17  # per-element features: 1 coefficient + 16 parameters
class SupercoNN_class(QDialog):
error_flag=False
def __init__(self):
super(SupercoNN_class, self).__init__()
loadUi('GUI_window.ui', self)
#init text fields
self.setWindowTitle('SupercoNN')
self.Source_file_lineEdit.setText(source_path)
self.Result_path_lineEdit.setText(result_path)
self.Min_temp_lineEdit.setText(str(min_temp))
self.Max_temp_lineEdit.setText(str(max_temp))
#init signals and slots
self.Set_result_path.clicked.connect(self.set_result_directory_path)
self.Set_source_file.clicked.connect(self.set_source_file_path)
self.Start_btn.clicked.connect(self.Start_function)
@pyqtSlot()
def set_result_directory_path(self):
result_path = str(QFileDialog.getExistingDirectory(self, "Select Directory for Result File"))
if result_path!="":
result_path=result_path+defalut_result_name
self.Result_path_lineEdit.setText(result_path)
def set_source_file_path(self):
source_path = QFileDialog.getOpenFileName(self, "Select Source File", "", "Text Files (*.txt *.csv)")
if source_path[0]!="":
source_path=source_path[0]
self.Source_file_lineEdit.setText(source_path)
def Start_function(self):
#init variables
elements=[]
formulas=[]
fromulas_processed=[]
nums_custom_formulas=0
#----------------------------
self.log_listWidget.addItem("--INFO-- Files checking")
#set temperatures
try:
min_temp=float(self.Min_temp_lineEdit.text())
if min_temp<0:
self.error_flag=True
self.log_listWidget.addItem("--ERROR-- Minimum temperature can not be smaller than 0")
except ValueError:
self.error_flag=True
self.log_listWidget.addItem("--ERROR-- Minimum temperanure must be number")
try:
max_temp=float(self.Max_temp_lineEdit.text())
except ValueError:
self.error_flag=True
self.log_listWidget.addItem("--ERROR-- Maximum temperanure must be number")
#------------------------------------------------
#read chemistry table file
try:
csv_chemestry_table=open(pte_adapted, newline='')
reader_chemestry_table = csv.reader(csv_chemestry_table, delimiter=',', quotechar='|')
#reading chemistry table
next(reader_chemestry_table)#drop headers
for i in range(86):
|
csv_chemestry_table.close()
except FileNotFoundError:
self.error_flag=True
self.log_listWidget.addItem("--ERROR-- Progam can not find file with chemestry table.")
self.log_listWidget.addItem("--ERROR-- File PTE_adapted.txt must be in defalut folder.")
self.log_listWidget.addItem("--ERROR-- Also you can download it from git:https://github.com/devdimit93/SupercoNN ")
#---------------------------------------------
#prepare result file
try:
result_path=str(self.Result_path_lineEdit.text())
result_cs_writer=open(result_path, 'a', newline='')
result_writer = csv.writer(result_cs_writer, delimiter=',', quotechar='|', quoting=csv.QUOTE_MINIMAL)
except FileNotFoundError:
self.error_flag=True
self.log_listWidget.addItem("--ERROR-- Programm can not open path for result file.")
self.log_listWidget.addItem("--ERROR-- Check path to result file")
#--------------------------------------------------
#reading and processing of source file
try:
source_path=str(self.Source_file_lineEdit.text())
csv_custom=open(source_path, newline='')
reader_custom = csv.reader(csv_custom, delimiter=',', quotechar='|')
element_names=["H","He","Li","Be","B","C","N", "O","F", "Ne",
"Na","Mg","Al","Si","P","S", "Cl","Ar",
"K","Ca","Sc","Ti","V","Cr","Mn","Fe", "Co","Ni", "Cu","Zn","Ga","Ge","As","Se","Br","Kr",
"Rb","Sr", "Y","Zr","Nb","Mo","Tc","Ru","Rh","Pd", "Ag","Cd", "In","Sn","Sb","Te","I","Xe",
"Cs","Ba", "La",
"Ce", "Pr","Nd","Pm", "Sm","Eu","Gd","Tb","Dy","Ho","Er","Tm", "Yb","Lu",
"Hf","Ta", "W","Re","Os","Ir","Pt","Au","Hg","Ti", "Pb","Bi", "Po","At","Rn"
]
rows=[]
read_it=True
while(read_it==True):
try:
read_row = next(reader_custom)
rows.append(read_row)
nums_custom_formulas=nums_custom_formulas+1
except StopIteration:
read_it=False
processed_rows=[]
for i in range(len(rows)):
buf=seperate_string_number(rows[i][0])
processed_rows.append(buf)
for i in range(len(processed_rows)):#formulas
elem_buf=[]
order=[]
for j in range(len(processed_rows[i])):#elements
for k in range(len(element_names)):#parametrs
if processed_rows[i][j]==element_names[k]:
order.append(k)
buf=[]
if j!=len(processed_rows[i])-1:
buf.append(float(processed_rows[i][j+1]))#coefficient
else:
buf.append(float(1))#
for f in range(len(elements[85])):
buf.append(float(elements[k][f]))
buf=np.asarray(buf)
elem_buf.append(buf)
#sort by atomic number
sorted_elem_buf=[]
for i in range(len(order)):
min_index = order.index(min(order))
sorted_elem_buf.append(elem_buf[min_index])
order[min_index]=999
sorted_elem_buf=np.asarray(sorted_elem_buf)
formulas.append(sorted_elem_buf)#elem_buf is transformed formula
formulas=np.asarray(formulas)
#formulas processing, stage 2
#expand each formula to size 10*17, the size of the neural network input:
#10 elements per formula, every element has 16 parameters and 1 coefficient
add_arr=[]  # here will be an abstract padding element; all of its parameters are zero
for i in range(num_elements):
add_arr.append(0)
add_arr=np.asarray(add_arr)
for i in range(formulas.shape[0]):
dist=target_size-formulas[i].shape[0]
buf1=[]
if dist>0:
for j in range(dist):
buf1.append(add_arr)
for j in range(formulas[i].shape[0]):
buf1.append(formulas[i][j])
buf1=np.asarray(buf1)
fromulas_processed.append(buf1)
fromulas_processed=np.asarray(fromulas_processed)
csv_custom.close()
except FileNotFoundError:
self.error_flag=True
self.log_listWidget.addItem("--ERROR-- Programm can not find source file.")
self.log_listWidget.addItem("--ERROR-- Check path to source file.")
#--------------------------------------------------------
#prepare model. reading model files
try:
self.log_listWidget.addItem("--INFO-- Reading model files. Please wait some time")
json_file = open(model_json, "r")
loaded_model_json = json_file.read()
json_file.close()
model = model_from_json(loaded_model_json)
except FileNotFoundError:
self.error_flag=True
self.log_listWidget.addItem("--ERROR-- Progam can not find json file of the pretrained model ")
self.log_listWidget.addItem("--ERROR-- You can download this file from git:https://github.com/devdimit93/SupercoNN/pretrained_nn")
self.log_listWidget.addItem("--ERROR-- Rename downloaded file into network.json ")
try:
model.load_weights(model_h5)
except OSError:
self.error_flag=True
self.log | read_row = next(reader_chemestry_table)
for j in range(len(read_row)):
read_row[j]=float(read_row[j])
elements.append(read_row) | conditional_block |
main.py | (string):
previous_character = string[0]
groups = []
newword = string[0]
for x, i in enumerate(string[1:]):
if i.isalpha() and previous_character.isalpha():
newword += i
elif i.isnumeric() and previous_character.isnumeric():
newword += i
else:
groups.append(newword)
newword = i
previous_character = i
if x == len(string) - 2:
groups.append(newword)
newword = ''
buf=[]
next_idx=0
for i in range(len(groups)):
if i<len(groups)-1:
if next_idx==i and groups[i+1]!=".":
buf.append(groups[i])
next_idx=next_idx+1
if next_idx==i and groups[i+1]==".":
b_str=groups[i]+groups[i+1]+groups[i+2]
buf.append(b_str)
next_idx=i+3
if i==len(groups)-1:
if groups[i-1]!=".":
buf.append(groups[i])
return buf
pte_adapted="PTE_adapted.txt"
model_json="network.json"
model_h5="network.h5"
min_temp=0
max_temp=999
defalut_result_name="/results.txt"
result_path="results.txt"
source_path="source.txt"
target_size=10  # number of element positions per formula
num_elements=17  # per-element features: 1 coefficient + 16 parameters
class SupercoNN_class(QDialog):
error_flag=False
def __init__(self):
super(SupercoNN_class, self).__init__()
loadUi('GUI_window.ui', self)
#init text fields
self.setWindowTitle('SupercoNN')
self.Source_file_lineEdit.setText(source_path)
self.Result_path_lineEdit.setText(result_path)
self.Min_temp_lineEdit.setText(str(min_temp))
self.Max_temp_lineEdit.setText(str(max_temp))
#init signals and slots
self.Set_result_path.clicked.connect(self.set_result_directory_path)
self.Set_source_file.clicked.connect(self.set_source_file_path)
self.Start_btn.clicked.connect(self.Start_function)
@pyqtSlot()
def set_result_directory_path(self):
result_path = str(QFileDialog.getExistingDirectory(self, "Select Directory for Result File"))
if result_path!="":
result_path=result_path+defalut_result_name
self.Result_path_lineEdit.setText(result_path)
def set_source_file_path(self):
source_path = QFileDialog.getOpenFileName(self, "Select Source File", "", "Text Files (*.txt *.csv)")
if source_path[0]!="":
source_path=source_path[0]
self.Source_file_lineEdit.setText(source_path)
def Start_function(self):
#init variables
elements=[]
formulas=[]
fromulas_processed=[]
nums_custom_formulas=0
#----------------------------
self.log_listWidget.addItem("--INFO-- Files checking")
#set temperatures
try:
min_temp=float(self.Min_temp_lineEdit.text())
if min_temp<0:
self.error_flag=True
self.log_listWidget.addItem("--ERROR-- Minimum temperature can not be smaller than 0")
except ValueError:
self.error_flag=True
self.log_listWidget.addItem("--ERROR-- Minimum temperanure must be number")
try:
max_temp=float(self.Max_temp_lineEdit.text())
except ValueError:
self.error_flag=True
self.log_listWidget.addItem("--ERROR-- Maximum temperanure must be number")
#------------------------------------------------
#read chemistry table file
try:
csv_chemestry_table=open(pte_adapted, newline='')
reader_chemestry_table = csv.reader(csv_chemestry_table, delimiter=',', quotechar='|')
#reading chemistry table
next(reader_chemestry_table)#drop headers
for i in range(86):
read_row = next(reader_chemestry_table)
for j in range(len(read_row)):
read_row[j]=float(read_row[j])
elements.append(read_row)
csv_chemestry_table.close()
except FileNotFoundError:
self.error_flag=True
self.log_listWidget.addItem("--ERROR-- Progam can not find file with chemestry table.")
self.log_listWidget.addItem("--ERROR-- File PTE_adapted.txt must be in defalut folder.")
self.log_listWidget.addItem("--ERROR-- Also you can download it from git:https://github.com/devdimit93/SupercoNN ")
#---------------------------------------------
#prepare result file
try:
result_path=str(self.Result_path_lineEdit.text())
result_cs_writer=open(result_path, 'a', newline='')
result_writer = csv.writer(result_cs_writer, delimiter=',', quotechar='|', quoting=csv.QUOTE_MINIMAL)
except FileNotFoundError:
self.error_flag=True
self.log_listWidget.addItem("--ERROR-- Programm can not open path for result file.")
self.log_listWidget.addItem("--ERROR-- Check path to result file")
#--------------------------------------------------
#reading and processing of source file
try:
source_path=str(self.Source_file_lineEdit.text())
csv_custom=open(source_path, newline='')
reader_custom = csv.reader(csv_custom, delimiter=',', quotechar='|')
element_names=["H","He","Li","Be","B","C","N", "O","F", "Ne",
"Na","Mg","Al","Si","P","S", "Cl","Ar",
"K","Ca","Sc","Ti","V","Cr","Mn","Fe", "Co","Ni", "Cu","Zn","Ga","Ge","As","Se","Br","Kr",
"Rb","Sr", "Y","Zr","Nb","Mo","Tc","Ru","Rh","Pd", "Ag","Cd", "In","Sn","Sb","Te","I","Xe",
"Cs","Ba", "La",
"Ce", "Pr","Nd","Pm", "Sm","Eu","Gd","Tb","Dy","Ho","Er","Tm", "Yb","Lu",
"Hf","Ta", "W","Re","Os","Ir","Pt","Au","Hg","Ti", "Pb","Bi", "Po","At","Rn"
]
rows=[]
read_it=True
while(read_it==True):
try:
read_row = next(reader_custom)
rows.append(read_row)
nums_custom_formulas=nums_custom_formulas+1
except StopIteration:
read_it=False
processed_rows=[]
for i in range(len(rows)):
buf=seperate_string_number(rows[i][0])
processed_rows.append(buf)
for i in range(len(processed_rows)):#formulas
elem_buf=[]
order=[]
for j in range(len(processed_rows[i])):#elements
for k in range(len(element_names)):#parametrs
if processed_rows[i][j]==element_names[k]:
order.append(k)
buf=[]
if j!=len(processed_rows[i])-1:
buf.append(float(processed_rows[i][j+1]))#coefficient
else:
buf.append(float(1))#
for f in range(len(elements[85])):
buf.append(float(elements[k][f]))
buf=np.asarray(buf)
elem_buf.append(buf)
#sort by atomic number
sorted_elem_buf=[]
for i in range(len(order)):
min_index = order.index(min(order))
sorted_elem_buf.append(elem_buf[min_index])
order[min_index]=999
sorted_elem_buf=np.asarray(sorted_elem_buf)
formulas.append(sorted_elem_buf)#elem_buf is transformed formula
formulas=np.asarray(formulas)
#formulas processing, stage 2
#expand each formula to size 10*17, the size of the neural network input:
#10 elements per formula, every element has 16 parameters and 1 coefficient
add_arr=[]  # here will be an abstract padding element; all of its parameters are zero
for i in range(num_elements):
add_arr.append(0)
add_arr=np.asarray(add_arr)
for i in range(formulas.shape[0]):
dist=target_size-formulas[i].shape[0]
buf1=[]
if dist>0:
for j in range(dist):
buf1.append(add_arr)
for j in range(formulas[i].shape[0]):
buf1.append(formulas[i][j])
buf1=np.asarray(buf1)
fromulas_processed.append(buf1)
fromulas_processed=np.asarray(fromulas_processed)
csv_custom.close()
except FileNotFoundError:
self.error_flag=True
self.log_listWidget.addItem("--ERROR-- Programm can not find source file.")
self.log_listWidget.addItem("--ERROR-- Check path to source file.")
#--------------------------------------------------------
#prepare model. reading model files
try:
self.log_listWidget.addItem("--INFO-- Reading model files. Please wait some time")
json_file = open(model_json, "r")
loaded_model_json = json_file.read()
json_file.close()
model = model_from_json(loaded_model_json)
except FileNotFoundError:
self.error_flag=True
self.log_listWidget.addItem("--ERROR-- Progam can not find json file of the pretrained model ")
self.log_listWidget.addItem("--ERROR-- You can download this file from git:https | seperate_string_number | identifier_name |
|
main.py | elif i.isnumeric() and previous_character.isnumeric():
newword += i
else:
groups.append(newword)
newword = i
previous_character = i
if x == len(string) - 2:
groups.append(newword)
newword = ''
buf=[]
next_idx=0
for i in range(len(groups)):
if i<len(groups)-1:
if next_idx==i and groups[i+1]!=".":
buf.append(groups[i])
next_idx=next_idx+1
if next_idx==i and groups[i+1]==".":
b_str=groups[i]+groups[i+1]+groups[i+2]
buf.append(b_str)
next_idx=i+3
if i==len(groups)-1:
if groups[i-1]!=".":
buf.append(groups[i])
return buf
pte_adapted="PTE_adapted.txt"
model_json="network.json"
model_h5="network.h5"
min_temp=0
max_temp=999
defalut_result_name="/results.txt"
result_path="results.txt"
source_path="source.txt"
target_size=10  # number of element positions per formula
num_elements=17  # per-element features: 1 coefficient + 16 parameters
class SupercoNN_class(QDialog):
error_flag=False
def __init__(self):
super(SupercoNN_class, self).__init__()
loadUi('GUI_window.ui', self)
#init text fields
self.setWindowTitle('SupercoNN')
self.Source_file_lineEdit.setText(source_path)
self.Result_path_lineEdit.setText(result_path)
self.Min_temp_lineEdit.setText(str(min_temp))
self.Max_temp_lineEdit.setText(str(max_temp))
#init signals and slots
self.Set_result_path.clicked.connect(self.set_result_directory_path)
self.Set_source_file.clicked.connect(self.set_source_file_path)
self.Start_btn.clicked.connect(self.Start_function)
@pyqtSlot()
def set_result_directory_path(self):
result_path = str(QFileDialog.getExistingDirectory(self, "Select Directory for Result File"))
if result_path!="":
result_path=result_path+defalut_result_name
self.Result_path_lineEdit.setText(result_path)
def set_source_file_path(self):
source_path = QFileDialog.getOpenFileName(self, "Select Source File", "", "Text Files (*.txt *.csv)")
if source_path[0]!="":
source_path=source_path[0]
self.Source_file_lineEdit.setText(source_path)
def Start_function(self):
#init variables
elements=[]
formulas=[]
fromulas_processed=[]
nums_custom_formulas=0
#----------------------------
self.log_listWidget.addItem("--INFO-- Files checking")
#set temperatures
try:
min_temp=float(self.Min_temp_lineEdit.text())
if min_temp<0:
self.error_flag=True
self.log_listWidget.addItem("--ERROR-- Minimum temperature can not be smaller than 0")
except ValueError:
self.error_flag=True
self.log_listWidget.addItem("--ERROR-- Minimum temperanure must be number")
try:
max_temp=float(self.Max_temp_lineEdit.text())
except ValueError:
self.error_flag=True
self.log_listWidget.addItem("--ERROR-- Maximum temperanure must be number")
#------------------------------------------------
#read chemistry table file
try:
csv_chemestry_table=open(pte_adapted, newline='')
reader_chemestry_table = csv.reader(csv_chemestry_table, delimiter=',', quotechar='|')
#reading chemistry table
next(reader_chemestry_table)#drop headers
for i in range(86):
read_row = next(reader_chemestry_table)
for j in range(len(read_row)):
read_row[j]=float(read_row[j])
elements.append(read_row)
csv_chemestry_table.close()
except FileNotFoundError:
self.error_flag=True
self.log_listWidget.addItem("--ERROR-- Progam can not find file with chemestry table.")
self.log_listWidget.addItem("--ERROR-- File PTE_adapted.txt must be in defalut folder.")
self.log_listWidget.addItem("--ERROR-- Also you can download it from git:https://github.com/devdimit93/SupercoNN ")
#---------------------------------------------
#prepare result file
|
try:
result_path=str(self.Result_path_lineEdit.text())
result_cs_writer=open(result_path, 'a', newline='')
result_writer = csv.writer(result_cs_writer, delimiter=',', quotechar='|', quoting=csv.QUOTE_MINIMAL)
except FileNotFoundError:
self.error_flag=True
self.log_listWidget.addItem("--ERROR-- Programm can not open path for result file.")
self.log_listWidget.addItem("--ERROR-- Check path to result file")
#--------------------------------------------------
#reading and processing of source file
try:
source_path=str(self.Source_file_lineEdit.text())
csv_custom=open(source_path, newline='')
reader_custom = csv.reader(csv_custom, delimiter=',', quotechar='|')
element_names=["H","He","Li","Be","B","C","N", "O","F", "Ne",
"Na","Mg","Al","Si","P","S", "Cl","Ar",
"K","Ca","Sc","Ti","V","Cr","Mn","Fe", "Co","Ni", "Cu","Zn","Ga","Ge","As","Se","Br","Kr",
"Rb","Sr", "Y","Zr","Nb","Mo","Tc","Ru","Rh","Pd", "Ag","Cd", "In","Sn","Sb","Te","I","Xe",
"Cs","Ba", "La",
"Ce", "Pr","Nd","Pm", "Sm","Eu","Gd","Tb","Dy","Ho","Er","Tm", "Yb","Lu",
"Hf","Ta", "W","Re","Os","Ir","Pt","Au","Hg","Ti", "Pb","Bi", "Po","At","Rn"
]
rows=[]
read_it=True
while(read_it==True):
try:
read_row = next(reader_custom)
rows.append(read_row)
nums_custom_formulas=nums_custom_formulas+1
except StopIteration:
read_it=False
processed_rows=[]
for i in range(len(rows)):
buf=seperate_string_number(rows[i][0])
processed_rows.append(buf)
for i in range(len(processed_rows)):#formulas
elem_buf=[]
order=[]
for j in range(len(processed_rows[i])):#elements
for k in range(len(element_names)):#parametrs
if processed_rows[i][j]==element_names[k]:
order.append(k)
buf=[]
if j!=len(processed_rows[i])-1:
buf.append(float(processed_rows[i][j+1]))#coefficient
else:
buf.append(float(1))#
for f in range(len(elements[85])):
buf.append(float(elements[k][f]))
buf=np.asarray(buf)
elem_buf.append(buf)
#sort by atomic number
sorted_elem_buf=[]
for i in range(len(order)):
min_index = order.index(min(order))
sorted_elem_buf.append(elem_buf[min_index])
order[min_index]=999
sorted_elem_buf=np.asarray(sorted_elem_buf)
formulas.append(sorted_elem_buf)#elem_buf is transformed formula
formulas=np.asarray(formulas)
#formulas processing, stage 2
#expand each formula to size 10*17, the size of the neural network input:
#10 elements per formula, every element has 16 parameters and 1 coefficient
add_arr=[]  # here will be an abstract padding element; all of its parameters are zero
for i in range(num_elements):
add_arr.append(0)
add_arr=np.asarray(add_arr)
for i in range(formulas.shape[0]):
dist=target_size-formulas[i].shape[0]
buf1=[]
if dist>0:
for j in range(dist):
buf1.append(add_arr)
for j in range(formulas[i].shape[0]):
buf1.append(formulas[i][j])
buf1=np.asarray(buf1)
fromulas_processed.append(buf1)
fromulas_processed=np.asarray(fromulas_processed)
csv_custom.close()
except FileNotFoundError:
self.error_flag=True
self.log_listWidget.addItem("--ERROR-- Programm can not find source file.")
self.log_listWidget.addItem("--ERROR-- Check path to source file.")
#--------------------------------------------------------
#prepare model. reading model files
try:
self.log_listWidget.addItem("--INFO-- Reading model files. Please wait some time")
json_file = open(model_json, "r")
loaded_model_json = json_file.read()
json_file.close()
model = model_from_json(loaded_model_json)
except FileNotFoundError:
self.error_flag=True
self.log_listWidget.addItem("--ERROR-- Progam can not find json file of the pretrained model ")
self.log_listWidget.addItem("--ERROR-- You can download this file from git:https://github.com/devdimit93/SupercoNN/pretrained_nn")
self.log_listWidget.addItem("--ERROR-- Rename downloaded file into network.json ")
try:
model.load_weights(model_h5)
except OSError:
self.error_flag=True
self | random_line_split |
|
LoginForm.js | ').width;
const ANIMATION_DURATION = 500;
class LoginForm extends Component {
constructor(props) {
super(props);
this.emailRef = this.updateRef.bind(this, 'email');
this.passwordRef = this.updateRef.bind(this, 'password');
const buttonOpacityValue = new Animated.Value(0); // declare animated value
const titlePositionValue = new Animated.Value(-SCREEN_WIDTH); // declare animated value
const subTitlePositionValue = new Animated.Value(-SCREEN_WIDTH); // declare animated value
this.state = {
signUpModalVisible: false,
dialogState: modalMessages.noEmail,
signInEnabled: false,
secureTextEntry: true,
titlePosition: titlePositionValue,
subTitlePosition: subTitlePositionValue,
buttonOpacity: buttonOpacityValue,
errors: {}
};
}
componentDidMount() {
Animated.parallel([
Animated.timing(this.state.titlePosition, {
toValue: 40,
duration: ANIMATION_DURATION,
easing: Easing.linear,
delay: 0
}),
Animated.timing(this.state.subTitlePosition, {
toValue: 40,
duration: ANIMATION_DURATION,
easing: Easing.linear,
delay: 250
}),
Animated.timing(this.state.buttonOpacity, {
toValue: 1,
duration: 2*ANIMATION_DURATION,
easing: Easing.linear,
delay: 500
})
]).start();
}
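// Animation timeline implied by the values above: the title slides in over
// 0-500ms, the subtitle over 250-750ms, and the button fades in over
// 500-1500ms (2 * ANIMATION_DURATION), a simple overlapping stagger.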
componentWillReceiveProps(nextProps) {
const {error} = this.props;
if (nextProps.error && nextProps.error !== error) {
this.dropdown.alertWithType('error', 'Error', nextProps.error);
}
}
// Text Input handlers
updateRef(name, ref) {
this[name] = ref;
}
onMailChangeText(text) {
this.props.emailChanged(text);
this.onChangeText(text);
}
onPasswordChangeText(text) {
this.props.passwordChanged(text);
this.onChangeText(text);
}
onChangeText(text) {
['email', 'password']
.map((name) => ({ name, ref: this[name] }))
.forEach(({ name, ref }) => {
if (ref.isFocused()) {
this.setState({ [name]: text });
}
});
}
onSubmitEmail() {
this.password.focus();
}
onSubmitPassword() {
this.password.blur();
}
onFocus() {
let { errors = {} } = this.state;
this.props.loginClearError();
for (let name in errors) {
let ref = this[name];
if (ref && ref.isFocused()) {
delete errors[name];
}
}
this.setState({ errors });
}
renderPasswordAccessory() {
const { secureTextEntry } = this.state;
return (
<TouchableOpacity
onPress={() => {this.setState({ secureTextEntry: !this.state.secureTextEntry })}}
>
<FontAwesome
style={{
color: primaryBlueColor,
fontSize: 24,
width: 44,
textAlign: 'right'
}}
>
{(secureTextEntry) ? Icons.eye : Icons.eyeSlash}
</FontAwesome>
</TouchableOpacity>
);
}
// Button press handlers
onSignInButtonPress() {
const { email, password } = this.props;
let errors = {};
['email', 'password']
.forEach((name) => {
let value = this[name].value();
if (!value) {
errors[name] = 'Should not be empty';
} else {
if ('email' === name && !checkEmail(value)) {
errors[name] = 'The email format is wrong';
}
}
});
this.setState({ errors });
if (_.isEmpty(errors)) {
this.props.loginUser({ email, password });
}
}
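// Illustrative shape of the errors object this method builds for an empty
// form (the keys come from the field names in the loop above):
//   { email: 'Should not be empty', password: 'Should not be empty' }
// loginUser is dispatched only when the object stays empty.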
//////////////////////
dismissModal() {
this.setState({signUpModalVisible: !this.state.signUpModalVisible});
}
createAccount() {
}
renderButton() {
const { loading } = this.props;
const {primaryWhiteTextStyle, startSpinnerStyle} = theme;
if (loading) {
return (
<View style={{
justifyContent: "space-around",
alignItems: 'center',
flexDirection: "column",
flex: 1
}}>
<Spinner
isVisible={true}
size={scale(60)}
type='ThreeBounce'
color={primaryBlueColor}
/>
</View>
);
}
return (
<SignUpButton
onPress={this.onSignInButtonPress.bind(this)}
>
<Text style={[primaryWhiteTextStyle, {fontSize: 14}]}>LOGIN</Text>
</SignUpButton>
);
}
render() {
const {
pageStyle,
logoStyle,
socialContainer,
buttonContainerstyle,
separatorStyle,
buttonTextStyle,
loginCardStyle,
buttonContainerStyle,
submitButtonStyle,
titleTextStyle,
headerStyle,
textStyle
} = styles;
const {
iconStyle,
iconTextStyle,
inputStyle,
inputTitleStyle,
primaryWhiteTextStyle,
primaryGreyTextStyle,
dropDownErrorTitleTextStyle,
dropDownErrorMessageTextStyle
} = theme;
const {
errors,
secureTextEntry,
titlePosition,
subTitlePosition,
buttonOpacity
} = this.state;
const {
name,
email,
password,
children
} = this.props;
firebase.analytics().setCurrentScreen('Login Screen', 'RegisterForm')
if (this.state.loggedIn) {
(children.profile && children.profile.journey) ? Actions.main(): Actions.journey();
return (<View />);
}
return (
<KeyboardAwareScrollView
style={{ backgroundColor: primaryWhiteColor }}
resetScrollToCoords={{ x: 0, y: 0 }}
contentContainerStyle={pageStyle}
scrollEnabled={true}
>
<SafeAreaView style={{flex: 1, backgroundColor: '#fff'}}>
<View style={logoStyle}>
<View style={headerStyle}>
<IconButton onPress={() => {Actions.pop()}}>
<FontAwesome>{Icons.angleLeft}</FontAwesome>
</IconButton>
<View style={{
height: 60,
width: 60
}}/>
</View>
</View>
<View style={{
flex: 3
}}>
<Animated.View style=
{{
marginRight: titlePosition
}}
>
<Text style={titleTextStyle}>
Health
</Text>
</Animated.View>
<Animated.View style=
{{
marginRight: subTitlePosition | }}
>
<Text style={titleTextStyle}>
is a journey
</Text>
</Animated.View>
</View>
<View style={loginCardStyle}>
<TextField
ref={this.emailRef}
label='Email address'
value={email}
onChangeText={this.onMailChangeText.bind(this)}
keyboardType="email-address"
autoCorrect={false}
autoCapitalize='none'
enablesReturnKeyAutomatically={true}
onFocus={this.onFocus.bind(this)}
onSubmitEditing={this.onSubmitEmail.bind(this)}
returnKeyType='next'
error={errors.email}
textColor={graphGreyColor}
baseColor={graphGreyColor}
tintColor={primaryGreyColor}
labelTextStyle={inputStyle}
titleTextStyle={inputTitleStyle}
/>
<TextField
label='Password'
ref={this.passwordRef}
secureTextEntry={secureTextEntry}
value={password}
autoCapitalize='none'
autoCorrect={false}
enablesReturnKeyAutomatically={true}
clearTextOnFocus={true}
onFocus={this.onFocus.bind(this)}
onChangeText={this.onPasswordChangeText.bind(this)}
onSubmitEditing={this.onSubmitPassword.bind(this)}
returnKeyType='done'
error={errors.password}
title='Between 8 and 20 characters'
maxLength={20}
characterRestriction={20}
renderAccessory={this.renderPasswordAccessory.bind(this)}
textColor={graphGreyColor}
baseColor={graphGreyColor}
tintColor={primaryGreyColor}
labelTextStyle={inputStyle}
titleTextStyle={inputTitleStyle}
titleFontSize={14}
/>
</View>
<Animated.View style={[submitButtonStyle, {opacity: buttonOpacity}]}>
{this.renderButton()}
</Animated.View>
<View style={separatorStyle}>
<Hyperlink
linkStyle={ { color: primaryBlueColor } }
onPress={ (url, text) => Actions.password()}
linkText={ url => url === 'http://citizenhealth.io' ? 'password?' : url }
>
<Text style= {[primaryGreyTextStyle, {color: primaryGreyColor}]}>Forgot your http://citizenhealth.io</Text>
</Hyperlink>
</View>
<View style={socialContainer}>
</View>
<DropdownAlert
ref={ref => this.dropdown = ref}
closeInterval={6000}
titleStyle = {dropDownErrorTitleTextStyle}
messageStyle = {dropDownErrorMessageTextStyle}
/>
<ModalDialog
visible={this.state.signUpModalVisible}
label={this.state.dialogState.message}
cancelLabel={this.state.dialogState.cancel}
acceptLabel={this.state.dialog | random_line_split |
|
LoginForm.js | width;
const ANIMATION_DURATION = 500;
class LoginForm extends Component {
constructor(props) {
super(props);
this.emailRef = this.updateRef.bind(this, 'email');
this.passwordRef = this.updateRef.bind(this, 'password');
const buttonOpacityValue = new Animated.Value(0); // declare animated value
const titlePositionValue = new Animated.Value(-SCREEN_WIDTH); // declare animated value
const subTitlePositionValue = new Animated.Value(-SCREEN_WIDTH); // declare animated value
this.state = {
signUpModalVisible: false,
dialogState: modalMessages.noEmail,
signInEnabled: false,
secureTextEntry: true,
titlePosition: titlePositionValue,
subTitlePosition: subTitlePositionValue,
buttonOpacity: buttonOpacityValue,
errors: {}
};
}
componentDidMount() {
Animated.parallel([
Animated.timing(this.state.titlePosition, {
toValue: 40,
duration: ANIMATION_DURATION,
easing: Easing.linear,
delay: 0
}),
Animated.timing(this.state.subTitlePosition, {
toValue: 40,
duration: ANIMATION_DURATION,
easing: Easing.linear,
delay: 250
}),
Animated.timing(this.state.buttonOpacity, {
toValue: 1,
duration: 2*ANIMATION_DURATION,
easing: Easing.linear,
delay: 500
})
]).start();
}
| (nextProps) {
const {error} = this.props;
if (nextProps.error && nextProps.error !== error) {
this.dropdown.alertWithType('error', 'Error', nextProps.error);
}
}
// Text Input handlers
updateRef(name, ref) {
this[name] = ref;
}
onMailChangeText(text) {
this.props.emailChanged(text);
this.onChangeText(text);
}
onPasswordChangeText(text) {
this.props.passwordChanged(text);
this.onChangeText(text);
}
onChangeText(text) {
['email', 'password']
.map((name) => ({ name, ref: this[name] }))
.forEach(({ name, ref }) => {
if (ref.isFocused()) {
this.setState({ [name]: text });
}
});
}
onSubmitEmail() {
this.password.focus();
}
onSubmitPassword() {
this.password.blur();
}
onFocus() {
let { errors = {} } = this.state;
this.props.loginClearError();
for (let name in errors) {
let ref = this[name];
if (ref && ref.isFocused()) {
delete errors[name];
}
}
this.setState({ errors });
}
renderPasswordAccessory() {
const { secureTextEntry } = this.state;
return (
<TouchableOpacity
onPress={() => {this.setState({ secureTextEntry: !this.state.secureTextEntry })}}
>
<FontAwesome
style={{
color: primaryBlueColor,
fontSize: 24,
width: 44,
textAlign: 'right'
}}
>
{(secureTextEntry) ? Icons.eye : Icons.eyeSlash}
</FontAwesome>
</TouchableOpacity>
);
}
// Button press handlers
onSignInButtonPress() {
const { email, password } = this.props;
let errors = {};
['email', 'password']
.forEach((name) => {
let value = this[name].value();
if (!value) {
errors[name] = 'Should not be empty';
} else {
if ('email' === name && !checkEmail(value)) {
errors[name] = 'The email format is wrong';
}
}
});
this.setState({ errors });
if (_.isEmpty(errors)) {
this.props.loginUser({ email, password });
}
}
//////////////////////
dismissModal() {
this.setState({signUpModalVisible: !this.state.signUpModalVisible});
}
createAccount() {
}
renderButton() {
const { loading } = this.props;
const {primaryWhiteTextStyle, startSpinnerStyle} = theme;
if (loading) {
return (
<View style={{
justifyContent: "space-around",
alignItems: 'center',
flexDirection: "column",
flex: 1
}}>
<Spinner
isVisible={true}
size={scale(60)}
type='ThreeBounce'
color={primaryBlueColor}
/>
</View>
);
}
return (
<SignUpButton
onPress={this.onSignInButtonPress.bind(this)}
>
<Text style={[primaryWhiteTextStyle, {fontSize: 14}]}>LOGIN</Text>
</SignUpButton>
);
}
render() {
const {
pageStyle,
logoStyle,
socialContainer,
buttonContainerstyle,
separatorStyle,
buttonTextStyle,
loginCardStyle,
buttonContainerStyle,
submitButtonStyle,
titleTextStyle,
headerStyle,
textStyle
} = styles;
const {
iconStyle,
iconTextStyle,
inputStyle,
inputTitleStyle,
primaryWhiteTextStyle,
primaryGreyTextStyle,
dropDownErrorTitleTextStyle,
dropDownErrorMessageTextStyle
} = theme;
const {
errors,
secureTextEntry,
titlePosition,
subTitlePosition,
buttonOpacity
} = this.state;
const {
name,
email,
password,
children
} = this.props;
firebase.analytics().setCurrentScreen('Login Screen', 'RegisterForm')
if (this.state.loggedIn) {
(children.profile && children.profile.journey) ? Actions.main(): Actions.journey();
return (<View />);
}
return (
<KeyboardAwareScrollView
style={{ backgroundColor: primaryWhiteColor }}
resetScrollToCoords={{ x: 0, y: 0 }}
contentContainerStyle={pageStyle}
scrollEnabled={true}
>
<SafeAreaView style={{flex: 1, backgroundColor: '#fff'}}>
<View style={logoStyle}>
<View style={headerStyle}>
<IconButton onPress={() => {Actions.pop()}}>
<FontAwesome>{Icons.angleLeft}</FontAwesome>
</IconButton>
<View style={{
height: 60,
width: 60
}}/>
</View>
</View>
<View style={{
flex: 3
}}>
<Animated.View style=
{{
marginRight: titlePosition
}}
>
<Text style={titleTextStyle}>
Health
</Text>
</Animated.View>
<Animated.View style=
{{
marginRight: subTitlePosition
}}
>
<Text style={titleTextStyle}>
is a journey
</Text>
</Animated.View>
</View>
<View style={loginCardStyle}>
<TextField
ref={this.emailRef}
label='Email address'
value={email}
onChangeText={this.onMailChangeText.bind(this)}
keyboardType="email-address"
autoCorrect={false}
autoCapitalize='none'
enablesReturnKeyAutomatically={true}
onFocus={this.onFocus.bind(this)}
onSubmitEditing={this.onSubmitEmail.bind(this)}
returnKeyType='next'
error={errors.email}
textColor={graphGreyColor}
baseColor={graphGreyColor}
tintColor={primaryGreyColor}
labelTextStyle={inputStyle}
titleTextStyle={inputTitleStyle}
/>
<TextField
label='Password'
ref={this.passwordRef}
secureTextEntry={secureTextEntry}
value={password}
autoCapitalize='none'
autoCorrect={false}
enablesReturnKeyAutomatically={true}
clearTextOnFocus={true}
onFocus={this.onFocus.bind(this)}
onChangeText={this.onPasswordChangeText.bind(this)}
onSubmitEditing={this.onSubmitPassword.bind(this)}
returnKeyType='done'
error={errors.password}
title='Between 8 and 20 characters'
maxLength={20}
characterRestriction={20}
renderAccessory={this.renderPasswordAccessory.bind(this)}
textColor={graphGreyColor}
baseColor={graphGreyColor}
tintColor={primaryGreyColor}
labelTextStyle={inputStyle}
titleTextStyle={inputTitleStyle}
titleFontSize={14}
/>
</View>
<Animated.View style={[submitButtonStyle, {opacity: buttonOpacity}]}>
{this.renderButton()}
</Animated.View>
<View style={separatorStyle}>
<Hyperlink
linkStyle={ { color: primaryBlueColor } }
onPress={ (url, text) => Actions.password()}
linkText={ url => url === 'http://citizenhealth.io' ? 'password?' : url }
>
<Text style= {[primaryGreyTextStyle, {color: primaryGreyColor}]}>Forgot your http://citizenhealth.io</Text>
</Hyperlink>
</View>
<View style={socialContainer}>
</View>
<DropdownAlert
ref={ref => this.dropdown = ref}
closeInterval={6000}
titleStyle = {dropDownErrorTitleTextStyle}
messageStyle = {dropDownErrorMessageTextStyle}
/>
<ModalDialog
visible={this.state.signUpModalVisible}
label={this.state.dialogState.message}
cancelLabel={this.state.dialogState.cancel}
acceptLabel={this.state | componentWillReceiveProps | identifier_name |
LoginForm.js | width;
const ANIMATION_DURATION = 500;
class LoginForm extends Component {
constructor(props) {
super(props);
this.emailRef = this.updateRef.bind(this, 'email');
this.passwordRef = this.updateRef.bind(this, 'password');
const buttonOpacityValue = new Animated.Value(0); // declare animated value
const titlePositionValue = new Animated.Value(-SCREEN_WIDTH); // declare animated value
const subTitlePositionValue = new Animated.Value(-SCREEN_WIDTH); // declare animated value
this.state = {
signUpModalVisible: false,
dialogState: modalMessages.noEmail,
signInEnabled: false,
secureTextEntry: true,
titlePosition: titlePositionValue,
subTitlePosition: subTitlePositionValue,
buttonOpacity: buttonOpacityValue,
errors: {}
};
}
componentDidMount() {
Animated.parallel([
Animated.timing(this.state.titlePosition, {
toValue: 40,
duration: ANIMATION_DURATION,
easing: Easing.linear,
delay: 0
}),
Animated.timing(this.state.subTitlePosition, {
toValue: 40,
duration: ANIMATION_DURATION,
easing: Easing.linear,
delay: 250
}),
Animated.timing(this.state.buttonOpacity, {
toValue: 1,
duration: 2*ANIMATION_DURATION,
easing: Easing.linear,
delay: 500
})
]).start();
}
componentWillReceiveProps(nextProps) {
const {error} = this.props;
if (nextProps.error && nextProps.error !== error) {
this.dropdown.alertWithType('error', 'Error', nextProps.error);
}
}
// Text Input handlers
updateRef(name, ref) {
this[name] = ref;
}
onMailChangeText(text) |
onPasswordChangeText(text) {
this.props.passwordChanged(text);
this.onChangeText(text);
}
onChangeText(text) {
['email', 'password']
.map((name) => ({ name, ref: this[name] }))
.forEach(({ name, ref }) => {
if (ref.isFocused()) {
this.setState({ [name]: text });
}
});
}
onSubmitEmail() {
this.password.focus();
}
onSubmitPassword() {
this.password.blur();
}
onFocus() {
let { errors = {} } = this.state;
this.props.loginClearError();
for (let name in errors) {
let ref = this[name];
if (ref && ref.isFocused()) {
delete errors[name];
}
}
this.setState({ errors });
}
renderPasswordAccessory() {
const { secureTextEntry } = this.state;
return (
<TouchableOpacity
onPress={() => {this.setState({ secureTextEntry: !this.state.secureTextEntry })}}
>
<FontAwesome
style={{
color: primaryBlueColor,
fontSize: 24,
width: 44,
textAlign: 'right'
}}
>
{(secureTextEntry) ? Icons.eye : Icons.eyeSlash}
</FontAwesome>
</TouchableOpacity>
);
}
// Button press handlers
onSignInButtonPress() {
const { email, password } = this.props;
let errors = {};
['email', 'password']
.forEach((name) => {
let value = this[name].value();
if (!value) {
errors[name] = 'Should not be empty';
} else {
if ('email' === name && !checkEmail(value)) {
errors[name] = 'The email format is wrong';
}
}
});
this.setState({ errors });
if (_.isEmpty(errors)) {
this.props.loginUser({ email, password });
}
}
//////////////////////
dismissModal() {
this.setState({signUpModalVisible: !this.state.signUpModalVisible});
}
createAccount() {
}
renderButton() {
const { loading } = this.props;
const {primaryWhiteTextStyle, startSpinnerStyle} = theme;
if (loading) {
return (
<View style={{
justifyContent: "space-around",
alignItems: 'center',
flexDirection: "column",
flex: 1
}}>
<Spinner
isVisible={true}
size={scale(60)}
type='ThreeBounce'
color={primaryBlueColor}
/>
</View>
);
}
return (
<SignUpButton
onPress={this.onSignInButtonPress.bind(this)}
>
<Text style={[primaryWhiteTextStyle, {fontSize: 14}]}>LOGIN</Text>
</SignUpButton>
);
}
render() {
const {
pageStyle,
logoStyle,
socialContainer,
buttonContainerstyle,
separatorStyle,
buttonTextStyle,
loginCardStyle,
buttonContainerStyle,
submitButtonStyle,
titleTextStyle,
headerStyle,
textStyle
} = styles;
const {
iconStyle,
iconTextStyle,
inputStyle,
inputTitleStyle,
primaryWhiteTextStyle,
primaryGreyTextStyle,
dropDownErrorTitleTextStyle,
dropDownErrorMessageTextStyle
} = theme;
const {
errors,
secureTextEntry,
titlePosition,
subTitlePosition,
buttonOpacity
} = this.state;
const {
name,
email,
password,
children
} = this.props;
firebase.analytics().setCurrentScreen('Login Screen', 'RegisterForm')
if (this.state.loggedIn) {
(children.profile && children.profile.journey) ? Actions.main(): Actions.journey();
return (<View />);
}
return (
<KeyboardAwareScrollView
style={{ backgroundColor: primaryWhiteColor }}
resetScrollToCoords={{ x: 0, y: 0 }}
contentContainerStyle={pageStyle}
scrollEnabled={true}
>
<SafeAreaView style={{flex: 1, backgroundColor: '#fff'}}>
<View style={logoStyle}>
<View style={headerStyle}>
<IconButton onPress={() => {Actions.pop()}}>
<FontAwesome>{Icons.angleLeft}</FontAwesome>
</IconButton>
<View style={{
height: 60,
width: 60
}}/>
</View>
</View>
<View style={{
flex: 3
}}>
<Animated.View style=
{{
marginRight: titlePosition
}}
>
<Text style={titleTextStyle}>
Health
</Text>
</Animated.View>
<Animated.View style=
{{
marginRight: subTitlePosition
}}
>
<Text style={titleTextStyle}>
is a journey
</Text>
</Animated.View>
</View>
<View style={loginCardStyle}>
<TextField
ref={this.emailRef}
label='Email address'
value={email}
onChangeText={this.onMailChangeText.bind(this)}
keyboardType="email-address"
autoCorrect={false}
autoCapitalize='none'
enablesReturnKeyAutomatically={true}
onFocus={this.onFocus.bind(this)}
onSubmitEditing={this.onSubmitEmail.bind(this)}
returnKeyType='next'
error={errors.email}
textColor={graphGreyColor}
baseColor={graphGreyColor}
tintColor={primaryGreyColor}
labelTextStyle={inputStyle}
titleTextStyle={inputTitleStyle}
/>
<TextField
label='Password'
ref={this.passwordRef}
secureTextEntry={secureTextEntry}
value={password}
autoCapitalize='none'
autoCorrect={false}
enablesReturnKeyAutomatically={true}
clearTextOnFocus={true}
onFocus={this.onFocus.bind(this)}
onChangeText={this.onPasswordChangeText.bind(this)}
onSubmitEditing={this.onSubmitPassword.bind(this)}
returnKeyType='done'
error={errors.password}
                title='Between 8 and 20 characters'
maxLength={20}
characterRestriction={20}
renderAccessory={this.renderPasswordAccessory.bind(this)}
textColor={graphGreyColor}
baseColor={graphGreyColor}
tintColor={primaryGreyColor}
labelTextStyle={inputStyle}
titleTextStyle={inputTitleStyle}
titleFontSize={14}
/>
</View>
<Animated.View style={[submitButtonStyle, {opacity: buttonOpacity}]}>
{this.renderButton()}
</Animated.View>
<View style={separatorStyle}>
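            {/* The literal URL below is a placeholder: linkText swaps it for the
                visible label 'password?', so only that word is tappable and
                routes to Actions.password(). */}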
<Hyperlink
linkStyle={ { color: primaryBlueColor } }
onPress={ (url, text) => Actions.password()}
linkText={ url => url === 'http://citizenhealth.io' ? 'password?' : url }
>
<Text style= {[primaryGreyTextStyle, {color: primaryGreyColor}]}>Forgot your http://citizenhealth.io</Text>
</Hyperlink>
</View>
<View style={socialContainer}>
</View>
<DropdownAlert
ref={ref => this.dropdown = ref}
closeInterval={6000}
titleStyle = {dropDownErrorTitleTextStyle}
messageStyle = {dropDownErrorMessageTextStyle}
/>
<ModalDialog
visible={this.state.signUpModalVisible}
label={this.state.dialogState.message}
cancelLabel={this.state.dialogState.cancel}
acceptLabel={this | {
this.props.emailChanged(text);
this.onChangeText(text);
} | identifier_body |
LoginForm.js | width;
const ANIMATION_DURATION = 500;
class LoginForm extends Component {
constructor(props) {
super(props);
this.emailRef = this.updateRef.bind(this, 'email');
this.passwordRef = this.updateRef.bind(this, 'password');
const buttonOpacityValue = new Animated.Value(0); // declare animated value
const titlePositionValue = new Animated.Value(-SCREEN_WIDTH); // declare animated value
const subTitlePositionValue = new Animated.Value(-SCREEN_WIDTH); // declare animated value
this.state = {
signUpModalVisible: false,
dialogState: modalMessages.noEmail,
signInEnabled: false,
secureTextEntry: true,
titlePosition: titlePositionValue,
subTitlePosition: subTitlePositionValue,
buttonOpacity: buttonOpacityValue,
errors: {}
};
}
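  // Staggered entrance animation: the title slides in, the subtitle follows
  // 250ms later, and the login button fades in last.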
componentDidMount() {
Animated.parallel([
Animated.timing(this.state.titlePosition, {
toValue: 40,
duration: ANIMATION_DURATION,
easing: Easing.linear,
delay: 0
}),
Animated.timing(this.state.subTitlePosition, {
toValue: 40,
duration: ANIMATION_DURATION,
easing: Easing.linear,
delay: 250
}),
Animated.timing(this.state.buttonOpacity, {
toValue: 1,
duration: 2*ANIMATION_DURATION,
easing: Easing.linear,
delay: 500
})
]).start();
}
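  // Note: componentWillReceiveProps is deprecated in newer React releases;
  // componentDidUpdate or getDerivedStateFromProps would be the migration path.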
componentWillReceiveProps(nextProps) {
const {error} = this.props;
if (nextProps.error && nextProps.error !== error) {
      this.dropdown.alertWithType('error', 'Error', nextProps.error);
}
}
// Text Input handlers
updateRef(name, ref) {
this[name] = ref;
}
onMailChangeText(text) {
this.props.emailChanged(text);
this.onChangeText(text);
}
onPasswordChangeText(text) {
this.props.passwordChanged(text);
this.onChangeText(text);
}
onChangeText(text) {
['email', 'password']
.map((name) => ({ name, ref: this[name] }))
.forEach(({ name, ref }) => {
if (ref.isFocused()) {
this.setState({ [name]: text });
}
});
}
onSubmitEmail() {
this.password.focus();
}
onSubmitPassword() {
this.password.blur();
}
onFocus() {
let { errors = {} } = this.state;
this.props.loginClearError();
for (let name in errors) {
let ref = this[name];
if (ref && ref.isFocused()) {
delete errors[name];
}
}
this.setState({ errors });
}
renderPasswordAccessory() {
const { secureTextEntry } = this.state;
return (
<TouchableOpacity
onPress={() => {this.setState({ secureTextEntry: !this.state.secureTextEntry })}}
>
<FontAwesome
style={{
color: primaryBlueColor,
fontSize: 24,
width: 44,
textAlign: 'right'
}}
>
{(secureTextEntry) ? Icons.eye : Icons.eyeSlash}
</FontAwesome>
</TouchableOpacity>
);
}
// Button press handlers
onSignInButtonPress() {
const { email, password } = this.props;
let errors = {};
['email', 'password']
.forEach((name) => {
let value = this[name].value();
if (!value) {
errors[name] = 'Should not be empty';
} else {
if ('email' === name && !checkEmail(value)) {
errors[name] = 'The email format is wrong';
}
}
});
this.setState({ errors });
if (_.isEmpty(errors)) {
this.props.loginUser({ email, password });
}
}
//////////////////////
dismissModal() {
this.setState({signUpModalVisible: !this.state.signUpModalVisible});
}
createAccount() {
}
renderButton() {
const { loading } = this.props;
const {primaryWhiteTextStyle, startSpinnerStyle} = theme;
if (loading) {
return (
<View style={{
justifyContent: "space-around",
alignItems: 'center',
flexDirection: "column",
flex: 1
}}>
<Spinner
isVisible={true}
size={scale(60)}
type='ThreeBounce'
color={primaryBlueColor}
/>
</View>
);
}
return (
<SignUpButton
onPress={this.onSignInButtonPress.bind(this)}
>
<Text style={[primaryWhiteTextStyle, {fontSize: 14}]}>LOGIN</Text>
</SignUpButton>
);
}
render() {
const {
pageStyle,
logoStyle,
socialContainer,
buttonContainerstyle,
separatorStyle,
buttonTextStyle,
loginCardStyle,
buttonContainerStyle,
submitButtonStyle,
titleTextStyle,
headerStyle,
textStyle
} = styles;
const {
iconStyle,
iconTextStyle,
inputStyle,
inputTitleStyle,
primaryWhiteTextStyle,
primaryGreyTextStyle,
dropDownErrorTitleTextStyle,
dropDownErrorMessageTextStyle
} = theme;
const {
errors,
secureTextEntry,
titlePosition,
subTitlePosition,
buttonOpacity
} = this.state;
const {
name,
email,
password,
children
} = this.props;
    firebase.analytics().setCurrentScreen('Login Screen', 'RegisterForm');
if (this.state.loggedIn) |
return (
<KeyboardAwareScrollView
style={{ backgroundColor: primaryWhiteColor }}
resetScrollToCoords={{ x: 0, y: 0 }}
contentContainerStyle={pageStyle}
scrollEnabled={true}
>
<SafeAreaView style={{flex: 1, backgroundColor: '#fff'}}>
<View style={logoStyle}>
<View style={headerStyle}>
<IconButton onPress={() => {Actions.pop()}}>
<FontAwesome>{Icons.angleLeft}</FontAwesome>
</IconButton>
<View style={{
height: 60,
width: 60
}}/>
</View>
</View>
<View style={{
flex: 3
}}>
<Animated.View style=
{{
marginRight: titlePosition
}}
>
<Text style={titleTextStyle}>
Health
</Text>
</Animated.View>
<Animated.View style=
{{
marginRight: subTitlePosition
}}
>
<Text style={titleTextStyle}>
is a journey
</Text>
</Animated.View>
</View>
<View style={loginCardStyle}>
<TextField
ref={this.emailRef}
label='Email address'
value={email}
onChangeText={this.onMailChangeText.bind(this)}
keyboardType="email-address"
autoCorrect={false}
autoCapitalize='none'
enablesReturnKeyAutomatically={true}
onFocus={this.onFocus.bind(this)}
onSubmitEditing={this.onSubmitEmail.bind(this)}
returnKeyType='next'
error={errors.email}
textColor={graphGreyColor}
baseColor={graphGreyColor}
tintColor={primaryGreyColor}
labelTextStyle={inputStyle}
titleTextStyle={inputTitleStyle}
/>
<TextField
label='Password'
ref={this.passwordRef}
secureTextEntry={secureTextEntry}
value={password}
autoCapitalize='none'
autoCorrect={false}
enablesReturnKeyAutomatically={true}
clearTextOnFocus={true}
onFocus={this.onFocus.bind(this)}
onChangeText={this.onPasswordChangeText.bind(this)}
onSubmitEditing={this.onSubmitPassword.bind(this)}
returnKeyType='done'
error={errors.password}
                title='Between 8 and 20 characters'
maxLength={20}
characterRestriction={20}
renderAccessory={this.renderPasswordAccessory.bind(this)}
textColor={graphGreyColor}
baseColor={graphGreyColor}
tintColor={primaryGreyColor}
labelTextStyle={inputStyle}
titleTextStyle={inputTitleStyle}
titleFontSize={14}
/>
</View>
<Animated.View style={[submitButtonStyle, {opacity: buttonOpacity}]}>
{this.renderButton()}
</Animated.View>
<View style={separatorStyle}>
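            {/* The literal URL below is a placeholder: linkText swaps it for the
                visible label 'password?', so only that word is tappable and
                routes to Actions.password(). */}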
<Hyperlink
linkStyle={ { color: primaryBlueColor } }
onPress={ (url, text) => Actions.password()}
linkText={ url => url === 'http://citizenhealth.io' ? 'password?' : url }
>
<Text style= {[primaryGreyTextStyle, {color: primaryGreyColor}]}>Forgot your http://citizenhealth.io</Text>
</Hyperlink>
</View>
<View style={socialContainer}>
</View>
<DropdownAlert
ref={ref => this.dropdown = ref}
closeInterval={6000}
titleStyle = {dropDownErrorTitleTextStyle}
messageStyle = {dropDownErrorMessageTextStyle}
/>
<ModalDialog
visible={this.state.signUpModalVisible}
label={this.state.dialogState.message}
cancelLabel={this.state.dialogState.cancel}
acceptLabel={this | {
(children.profile && children.profile.journey) ? Actions.main(): Actions.journey();
return (<View />);
} | conditional_block |
provision.go | return err
}
if err := p.checkAndDeployProxy(authProxyName, customizedProxy, p.forceProxyInstall, verbosef); err != nil {
return errors.Wrapf(err, "deploying runtime proxy %s", authProxyName)
}
// Deploy remote-token proxy
if p.IsGCPManaged {
customizedProxy, err = getCustomizedProxy(tempDir, remoteTokenProxy, nil)
} else {
customizedProxy, err = getCustomizedProxy(tempDir, legacyTokenProxy, replaceVH)
}
if err != nil {
return err
}
if err := p.checkAndDeployProxy(tokenProxyName, customizedProxy, false, verbosef); err != nil {
return errors.Wrapf(err, "deploying token proxy %s", tokenProxyName)
}
if !p.IsGCPManaged {
cred, err = p.createLegacyCredential(verbosef) // TODO: on missing or force new cred
if err != nil {
return errors.Wrapf(err, "generating credential")
}
if err := p.getOrCreateKVM(cred, verbosef); err != nil {
return errors.Wrapf(err, "retrieving or creating kvm")
}
}
config := p.ServerConfig
if config == nil {
config = p.createConfig(cred)
}
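	// For GCP-managed runtimes, make sure the tenant has signing material:
	// reuse policy secrets found in the propertyset when possible, otherwise
	// mint a fresh RSA key, then rotate the JWKS keeping the newest keys.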
if p.IsGCPManaged && (config.Tenant.PrivateKey == nil || p.rotate > 0) {
var keyID string
var privateKey *rsa.PrivateKey
var jwks jwk.Set
var err error
if p.isCloud() { // attempt to fetch secrets from propertysets
keyID, privateKey, jwks, err = p.policySecretsFromPropertyset()
}
if err != nil || privateKey == nil {
verbosef("no existing policy secret, creating new ones")
keyID, privateKey, jwks, err = p.CreateNewKey()
if err != nil {
return err
}
}
config.Tenant.PrivateKey = privateKey
config.Tenant.PrivateKeyID = keyID
if jwks, err = p.RotateJWKS(jwks, p.rotate); err != nil {
return err
}
config.Tenant.JWKS = jwks
}
var verifyErrors error
if p.IsGCPManaged {
verifyErrors = p.verifyWithRetry(config, verbosef)
		// creates the policy secrets if it is GCP managed
if err := p.createPolicySecretData(config, verbosef); err != nil {
return errors.Wrapf(err, "creating policy secret data")
}
		// creates the analytics secrets if it is GCP managed
if err := p.createAnalyticsSecretData(config, verbosef); err != nil {
return errors.Wrapf(err, "creating analytics secret data")
}
if len(p.analyticsSecretData) == 0 {
shared.Errorf("\nWARNING: No analytics service account given via --analytics-sa or config.yaml.")
shared.Errorf("\nIMPORTANT: Please make sure the application default credentials where the adapter is run are correctly configured.")
}
} else {
verifyErrors = p.verifyWithoutRetry(config, verbosef)
}
if err := p.printConfig(config, printf, verifyErrors, verbosef); err != nil {
return errors.Wrapf(err, "generating config")
}
if verifyErrors == nil {
verbosef("provisioning verified OK")
}
// return possible errors if not hybrid
if !p.IsGCPManaged || p.isCloud() {
return verifyErrors
}
// output this warning for hybrid
if p.rotate > 0 {
shared.Errorf("\nIMPORTANT: Provisioned config with rotated secrets needs to be applied onto the k8s cluster to take effect.")
} else {
shared.Errorf("\nIMPORTANT: Provisioned config needs to be applied onto the k8s cluster to take effect.")
}
return verifyErrors
}
// retrieveRuntimeType fetches the organization information from the management base and extracts the runtime type
func (p *provision) retrieveRuntimeType() error {
req, err := p.ApigeeClient.NewRequestNoEnv(http.MethodGet, "", nil)
if err != nil {
return err
}
org := &apigee.Organization{}
if _, err := p.ApigeeClient.Do(req, org); err != nil {
return err
}
p.runtimeType = org.RuntimeType
return nil
}
// isCloud determines whether it is NG SaaS
func (p *provision) isCloud() bool {
return p.IsGCPManaged && p.runtimeType == "CLOUD"
}
// policySecretsFromPropertyset retrieves the policy secret from the remote-service propertyset
// the returned values will be empty or nil if such propertyset does not exist or there is other error fetching it
func (p *provision) policySecretsFromPropertyset() (keyID string, privateKey *rsa.PrivateKey, jwks jwk.Set, err error) {
req, err := p.ApigeeClient.NewRequest(http.MethodGet, fmt.Sprintf(propertysetGETOrPUTURL, "remote-service"), nil)
if err != nil {
return
}
buf := new(bytes.Buffer)
_, err = p.ApigeeClient.Do(req, buf)
if err != nil {
return
}
// read the response into a map
m, err := server.ReadProperties(buf)
if err != nil {
return
}
// extracts the jwks from the map
jwksStr, ok := m["crt"]
if !ok {
err = fmt.Errorf("crt not found in remote-service propertyset")
return
}
jwks = jwk.NewSet()
err = json.Unmarshal([]byte(jwksStr), jwks)
if err != nil {
return
}
// extracts the private key from the map
pkStr, ok := m["key"]
if !ok {
err = fmt.Errorf("key not found in remote-service propertyset")
return
}
privateKey, err = server.LoadPrivateKey([]byte(strings.ReplaceAll(pkStr, `\n`, "\n")))
if err != nil {
return
}
// extracts the key id from the map
keyID, ok = m[server.SecretPropsKIDKey]
if !ok {
err = fmt.Errorf("kid not found in remote-service propertyset")
return
}
return
}
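// createAuthorizedClient wraps the default transport with the tenant's
// authorization round tripper; TLS verification can be relaxed via config.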
func (p *provision) createAuthorizedClient(config *server.Config) (*http.Client, error) {
// add authorization to transport
tr := http.DefaultTransport
if config.Tenant.TLS.AllowUnverifiedSSLCert {
tr = &http.Transport{
Proxy: http.ProxyFromEnvironment,
DialContext: (&net.Dialer{
Timeout: 30 * time.Second,
KeepAlive: 30 * time.Second,
DualStack: true,
}).DialContext,
MaxIdleConns: 100,
IdleConnTimeout: 90 * time.Second,
TLSHandshakeTimeout: 10 * time.Second,
ExpectContinueTimeout: 1 * time.Second,
TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
}
}
tr, err := server.AuthorizationRoundTripper(config, tr)
if err != nil {
return nil, err
}
return &http.Client{
Timeout: config.Tenant.ClientTimeout,
Transport: tr,
}, nil
}
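// verifyWithRetry polls the freshly deployed proxies until they respond or
// the overall timeout elapses; GCP-managed deployments may take a while to
// become routable, hence the polling loop below.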
func (p *provision) verifyWithRetry(config *server.Config, verbosef shared.FormatFn) error {
var verifyErrors error
timeout := time.After(duration)
	tick := time.NewTicker(interval)
	defer tick.Stop() // time.Tick would leak its ticker; NewTicker can be stopped
for {
select {
case <-timeout:
if verifyErrors != nil {
shared.Errorf("\nWARNING: Apigee may not be provisioned properly.")
shared.Errorf("Unable to verify proxy endpoint(s). Errors:\n")
for _, err := range errorset.Errors(verifyErrors) {
shared.Errorf(" %s", err)
}
shared.Errorf("\n")
}
return verifyErrors
		case <-tick.C:
verifyErrors = p.verify(config, verbosef)
if verifyErrors == nil {
return nil
}
verbosef("verifying proxies failed, trying again...")
}
}
}
func (p *provision) verifyWithoutRetry(config *server.Config, verbosef shared.FormatFn) error {
verifyErrors := p.verify(config, verbosef)
if verifyErrors != nil {
shared.Errorf("\nWARNING: Apigee may not be provisioned properly.")
shared.Errorf("Unable to verify proxy endpoint(s). Errors:\n")
for _, err := range errorset.Errors(verifyErrors) {
shared.Errorf(" %s", err)
}
shared.Errorf("\n")
}
return verifyErrors
}
func (p *provision) verify(config *server.Config, verbosef shared.FormatFn) error | {
client, err := p.createAuthorizedClient(config)
if err != nil {
return err
}
var verifyErrors error
if p.IsLegacySaaS || p.IsOPDK {
verbosef("verifying internal proxy...")
verifyErrors = p.verifyInternalProxy(client, verbosef)
}
verbosef("verifying remote-service proxy...")
verifyErrors = errorset.Append(verifyErrors, p.verifyRemoteServiceProxy(client, verbosef))
return verifyErrors
} | identifier_body |
|
provision.go | != "" {
newVH = newVH + fmt.Sprintf(virtualHostReplacementFmt, vh)
}
}
// remove all "secure" virtualhost
bytes = []byte(strings.ReplaceAll(string(bytes), virtualHostDeleteText, ""))
// replace the "default" virtualhost
bytes = []byte(strings.Replace(string(bytes), virtualHostReplaceText, newVH, 1))
if err := os.WriteFile(proxiesFile, bytes, 0); err != nil {
return errors.Wrapf(err, "writing file %s", proxiesFile)
}
return nil
}
replaceInFile := func(file, old, new string) error {
bytes, err := os.ReadFile(file)
if err != nil {
return errors.Wrapf(err, "reading file %s", file)
}
bytes = []byte(strings.Replace(string(bytes), old, new, 1))
if err := os.WriteFile(file, bytes, 0); err != nil {
return errors.Wrapf(err, "writing file %s", file)
}
return nil
}
// replace the version using the build info
replaceVersion := func(proxyDir string) error {
calloutFile := filepath.Join(proxyDir, "policies", "Send-Version.xml")
oldValue := `"version":"{{version}}"`
newValue := fmt.Sprintf(`"version":"%s"`, shared.BuildInfo.Version)
if err := replaceInFile(calloutFile, oldValue, newValue); err != nil {
return err
}
return nil
}
replaceVHAndAuthTarget := func(proxyDir string) error {
if err := replaceVH(proxyDir); err != nil {
return err
}
if err := replaceVersion(proxyDir); err != nil {
return err
}
if p.IsOPDK {
// OPDK must target local internal proxy
authFile := filepath.Join(proxyDir, "policies", "Authenticate-Call.xml")
oldTarget := "https://edgemicroservices.apigee.net"
newTarget := p.RuntimeBase
if err := replaceInFile(authFile, oldTarget, newTarget); err != nil {
return err
}
// OPDK must have org.noncps = true for products callout
calloutFile := filepath.Join(proxyDir, "policies", "JavaCallout.xml")
oldValue := "</Properties>"
newValue := `<Property name="org.noncps">true</Property>
</Properties>`
if err := replaceInFile(calloutFile, oldValue, newValue); err != nil {
return err
}
}
return nil
}
if p.IsOPDK {
if err := p.deployInternalProxy(replaceVH, tempDir, verbosef); err != nil {
return errors.Wrap(err, "deploying internal proxy")
}
}
// deploy remote-service proxy
var customizedProxy string
if p.IsGCPManaged {
customizedProxy, err = getCustomizedProxy(tempDir, remoteServiceProxy, replaceVersion)
} else {
customizedProxy, err = getCustomizedProxy(tempDir, legacyServiceProxy, replaceVHAndAuthTarget)
}
if err != nil {
return err
}
if err := p.checkAndDeployProxy(authProxyName, customizedProxy, p.forceProxyInstall, verbosef); err != nil {
return errors.Wrapf(err, "deploying runtime proxy %s", authProxyName)
}
// Deploy remote-token proxy
if p.IsGCPManaged {
customizedProxy, err = getCustomizedProxy(tempDir, remoteTokenProxy, nil)
} else {
customizedProxy, err = getCustomizedProxy(tempDir, legacyTokenProxy, replaceVH)
}
if err != nil {
return err
}
if err := p.checkAndDeployProxy(tokenProxyName, customizedProxy, false, verbosef); err != nil {
return errors.Wrapf(err, "deploying token proxy %s", tokenProxyName)
}
if !p.IsGCPManaged {
cred, err = p.createLegacyCredential(verbosef) // TODO: on missing or force new cred
if err != nil {
return errors.Wrapf(err, "generating credential")
}
if err := p.getOrCreateKVM(cred, verbosef); err != nil {
return errors.Wrapf(err, "retrieving or creating kvm")
}
}
config := p.ServerConfig
if config == nil {
config = p.createConfig(cred)
}
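	// For GCP-managed runtimes, ensure the tenant has signing material before
	// verification: reuse secrets from the propertyset when available,
	// otherwise create a new key and rotate the JWKS.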
if p.IsGCPManaged && (config.Tenant.PrivateKey == nil || p.rotate > 0) {
var keyID string
var privateKey *rsa.PrivateKey
var jwks jwk.Set
var err error
if p.isCloud() { // attempt to fetch secrets from propertysets
keyID, privateKey, jwks, err = p.policySecretsFromPropertyset()
}
if err != nil || privateKey == nil {
verbosef("no existing policy secret, creating new ones")
keyID, privateKey, jwks, err = p.CreateNewKey()
if err != nil {
return err
}
}
config.Tenant.PrivateKey = privateKey
config.Tenant.PrivateKeyID = keyID
if jwks, err = p.RotateJWKS(jwks, p.rotate); err != nil {
return err
}
config.Tenant.JWKS = jwks
}
var verifyErrors error
if p.IsGCPManaged {
verifyErrors = p.verifyWithRetry(config, verbosef)
		// creates the policy secrets if it is GCP managed
if err := p.createPolicySecretData(config, verbosef); err != nil {
return errors.Wrapf(err, "creating policy secret data")
}
		// creates the analytics secrets if it is GCP managed
if err := p.createAnalyticsSecretData(config, verbosef); err != nil {
return errors.Wrapf(err, "creating analytics secret data")
}
if len(p.analyticsSecretData) == 0 {
shared.Errorf("\nWARNING: No analytics service account given via --analytics-sa or config.yaml.")
shared.Errorf("\nIMPORTANT: Please make sure the application default credentials where the adapter is run are correctly configured.")
}
} else {
verifyErrors = p.verifyWithoutRetry(config, verbosef)
}
if err := p.printConfig(config, printf, verifyErrors, verbosef); err != nil {
return errors.Wrapf(err, "generating config")
}
if verifyErrors == nil {
verbosef("provisioning verified OK")
}
// return possible errors if not hybrid
if !p.IsGCPManaged || p.isCloud() {
return verifyErrors
}
// output this warning for hybrid
if p.rotate > 0 {
shared.Errorf("\nIMPORTANT: Provisioned config with rotated secrets needs to be applied onto the k8s cluster to take effect.")
} else {
shared.Errorf("\nIMPORTANT: Provisioned config needs to be applied onto the k8s cluster to take effect.")
}
return verifyErrors
}
// retrieveRuntimeType fetches the organization information from the management base and extracts the runtime type
func (p *provision) retrieveRuntimeType() error {
req, err := p.ApigeeClient.NewRequestNoEnv(http.MethodGet, "", nil)
if err != nil {
return err
}
org := &apigee.Organization{}
if _, err := p.ApigeeClient.Do(req, org); err != nil {
return err
}
p.runtimeType = org.RuntimeType
return nil
}
// isCloud determines whether it is NG SaaS
func (p *provision) isCloud() bool {
return p.IsGCPManaged && p.runtimeType == "CLOUD"
}
// policySecretsFromPropertyset retrieves the policy secret from the remote-service propertyset
// the returned values will be empty or nil if such propertyset does not exist or there is other error fetching it
func (p *provision) policySecretsFromPropertyset() (keyID string, privateKey *rsa.PrivateKey, jwks jwk.Set, err error) {
req, err := p.ApigeeClient.NewRequest(http.MethodGet, fmt.Sprintf(propertysetGETOrPUTURL, "remote-service"), nil)
if err != nil {
return
}
buf := new(bytes.Buffer)
_, err = p.ApigeeClient.Do(req, buf)
if err != nil {
return
}
// read the response into a map
m, err := server.ReadProperties(buf)
if err != nil {
return
}
// extracts the jwks from the map
jwksStr, ok := m["crt"]
if !ok {
err = fmt.Errorf("crt not found in remote-service propertyset")
return
}
jwks = jwk.NewSet()
err = json.Unmarshal([]byte(jwksStr), jwks)
if err != nil {
return
}
// extracts the private key from the map
pkStr, ok := m["key"]
if !ok {
err = fmt.Errorf("key not found in remote-service propertyset")
return
} | privateKey, err = server.LoadPrivateKey([]byte(strings.ReplaceAll(pkStr, `\n`, "\n")))
if err != nil {
return
}
| random_line_split |
|
provision.go | default" virtualhost
bytes = []byte(strings.Replace(string(bytes), virtualHostReplaceText, newVH, 1))
if err := os.WriteFile(proxiesFile, bytes, 0); err != nil {
return errors.Wrapf(err, "writing file %s", proxiesFile)
}
return nil
}
replaceInFile := func(file, old, new string) error {
bytes, err := os.ReadFile(file)
if err != nil {
return errors.Wrapf(err, "reading file %s", file)
}
bytes = []byte(strings.Replace(string(bytes), old, new, 1))
if err := os.WriteFile(file, bytes, 0); err != nil {
return errors.Wrapf(err, "writing file %s", file)
}
return nil
}
// replace the version using the build info
replaceVersion := func(proxyDir string) error {
calloutFile := filepath.Join(proxyDir, "policies", "Send-Version.xml")
oldValue := `"version":"{{version}}"`
newValue := fmt.Sprintf(`"version":"%s"`, shared.BuildInfo.Version)
if err := replaceInFile(calloutFile, oldValue, newValue); err != nil {
return err
}
return nil
}
replaceVHAndAuthTarget := func(proxyDir string) error {
if err := replaceVH(proxyDir); err != nil {
return err
}
if err := replaceVersion(proxyDir); err != nil {
return err
}
if p.IsOPDK {
// OPDK must target local internal proxy
authFile := filepath.Join(proxyDir, "policies", "Authenticate-Call.xml")
oldTarget := "https://edgemicroservices.apigee.net"
newTarget := p.RuntimeBase
if err := replaceInFile(authFile, oldTarget, newTarget); err != nil {
return err
}
// OPDK must have org.noncps = true for products callout
calloutFile := filepath.Join(proxyDir, "policies", "JavaCallout.xml")
oldValue := "</Properties>"
newValue := `<Property name="org.noncps">true</Property>
</Properties>`
if err := replaceInFile(calloutFile, oldValue, newValue); err != nil {
return err
}
}
return nil
}
if p.IsOPDK {
if err := p.deployInternalProxy(replaceVH, tempDir, verbosef); err != nil {
return errors.Wrap(err, "deploying internal proxy")
}
}
// deploy remote-service proxy
var customizedProxy string
if p.IsGCPManaged {
customizedProxy, err = getCustomizedProxy(tempDir, remoteServiceProxy, replaceVersion)
} else {
customizedProxy, err = getCustomizedProxy(tempDir, legacyServiceProxy, replaceVHAndAuthTarget)
}
if err != nil {
return err
}
if err := p.checkAndDeployProxy(authProxyName, customizedProxy, p.forceProxyInstall, verbosef); err != nil {
return errors.Wrapf(err, "deploying runtime proxy %s", authProxyName)
}
// Deploy remote-token proxy
if p.IsGCPManaged {
customizedProxy, err = getCustomizedProxy(tempDir, remoteTokenProxy, nil)
} else {
customizedProxy, err = getCustomizedProxy(tempDir, legacyTokenProxy, replaceVH)
}
if err != nil {
return err
}
if err := p.checkAndDeployProxy(tokenProxyName, customizedProxy, false, verbosef); err != nil {
return errors.Wrapf(err, "deploying token proxy %s", tokenProxyName)
}
if !p.IsGCPManaged {
cred, err = p.createLegacyCredential(verbosef) // TODO: on missing or force new cred
if err != nil {
return errors.Wrapf(err, "generating credential")
}
if err := p.getOrCreateKVM(cred, verbosef); err != nil {
return errors.Wrapf(err, "retrieving or creating kvm")
}
}
config := p.ServerConfig
if config == nil {
config = p.createConfig(cred)
}
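	// Signing material for GCP-managed runtimes: prefer secrets already in
	// the propertyset, fall back to generating a new key pair.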
if p.IsGCPManaged && (config.Tenant.PrivateKey == nil || p.rotate > 0) {
var keyID string
var privateKey *rsa.PrivateKey
var jwks jwk.Set
var err error
if p.isCloud() { // attempt to fetch secrets from propertysets
keyID, privateKey, jwks, err = p.policySecretsFromPropertyset()
}
if err != nil || privateKey == nil {
verbosef("no existing policy secret, creating new ones")
keyID, privateKey, jwks, err = p.CreateNewKey()
if err != nil {
return err
}
}
config.Tenant.PrivateKey = privateKey
config.Tenant.PrivateKeyID = keyID
if jwks, err = p.RotateJWKS(jwks, p.rotate); err != nil {
return err
}
config.Tenant.JWKS = jwks
}
var verifyErrors error
if p.IsGCPManaged {
verifyErrors = p.verifyWithRetry(config, verbosef)
		// creates the policy secrets if it is GCP managed
if err := p.createPolicySecretData(config, verbosef); err != nil {
return errors.Wrapf(err, "creating policy secret data")
}
		// creates the analytics secrets if it is GCP managed
if err := p.createAnalyticsSecretData(config, verbosef); err != nil {
return errors.Wrapf(err, "creating analytics secret data")
}
if len(p.analyticsSecretData) == 0 {
shared.Errorf("\nWARNING: No analytics service account given via --analytics-sa or config.yaml.")
shared.Errorf("\nIMPORTANT: Please make sure the application default credentials where the adapter is run are correctly configured.")
}
} else {
verifyErrors = p.verifyWithoutRetry(config, verbosef)
}
if err := p.printConfig(config, printf, verifyErrors, verbosef); err != nil {
return errors.Wrapf(err, "generating config")
}
if verifyErrors == nil {
verbosef("provisioning verified OK")
}
// return possible errors if not hybrid
if !p.IsGCPManaged || p.isCloud() {
return verifyErrors
}
// output this warning for hybrid
if p.rotate > 0 {
shared.Errorf("\nIMPORTANT: Provisioned config with rotated secrets needs to be applied onto the k8s cluster to take effect.")
} else {
shared.Errorf("\nIMPORTANT: Provisioned config needs to be applied onto the k8s cluster to take effect.")
}
return verifyErrors
}
// retrieveRuntimeType fetches the organization information from the management base and extracts the runtime type
func (p *provision) retrieveRuntimeType() error {
req, err := p.ApigeeClient.NewRequestNoEnv(http.MethodGet, "", nil)
if err != nil {
return err
}
org := &apigee.Organization{}
if _, err := p.ApigeeClient.Do(req, org); err != nil {
return err
}
p.runtimeType = org.RuntimeType
return nil
}
// isCloud determines whether it is NG SaaS
func (p *provision) isCloud() bool {
return p.IsGCPManaged && p.runtimeType == "CLOUD"
}
// policySecretsFromPropertyset retrieves the policy secret from the remote-service propertyset
// the returned values will be empty or nil if such propertyset does not exist or there is other error fetching it
func (p *provision) policySecretsFromPropertyset() (keyID string, privateKey *rsa.PrivateKey, jwks jwk.Set, err error) {
req, err := p.ApigeeClient.NewRequest(http.MethodGet, fmt.Sprintf(propertysetGETOrPUTURL, "remote-service"), nil)
if err != nil {
return
}
buf := new(bytes.Buffer)
_, err = p.ApigeeClient.Do(req, buf)
if err != nil {
return
}
// read the response into a map
m, err := server.ReadProperties(buf)
if err != nil {
return
}
// extracts the jwks from the map
jwksStr, ok := m["crt"]
if !ok {
err = fmt.Errorf("crt not found in remote-service propertyset")
return
}
jwks = jwk.NewSet()
err = json.Unmarshal([]byte(jwksStr), jwks)
if err != nil {
return
}
// extracts the private key from the map
pkStr, ok := m["key"]
if !ok {
err = fmt.Errorf("key not found in remote-service propertyset")
return
}
privateKey, err = server.LoadPrivateKey([]byte(strings.ReplaceAll(pkStr, `\n`, "\n")))
if err != nil {
return
}
// extracts the key id from the map
keyID, ok = m[server.SecretPropsKIDKey]
if !ok {
err = fmt.Errorf("kid not found in remote-service propertyset")
return
}
return
}
func (p *provision) | createAuthorizedClient | identifier_name |
|
provision.go | ee SaaS (sets management and runtime URL)")
c.Flags().BoolVarP(&rootArgs.IsOPDK, "opdk", "", false,
"Apigee opdk")
c.Flags().StringVarP(&rootArgs.Token, "token", "t", "",
"Apigee OAuth or SAML token (overrides any other given credentials)")
c.Flags().StringVarP(&rootArgs.Username, "username", "u", "",
"Apigee username (legacy or opdk only)")
c.Flags().StringVarP(&rootArgs.Password, "password", "p", "",
"Apigee password (legacy or opdk only)")
c.Flags().StringVarP(&rootArgs.MFAToken, "mfa", "", "",
"Apigee multi-factor authorization token (legacy only)")
c.Flags().StringVarP(&p.analyticsServiceAccount, "analytics-sa", "", "",
"path to the service account json file (for GCP-managed analytics only)")
c.Flags().BoolVarP(&p.forceProxyInstall, "force-proxy-install", "f", false,
"force new proxy install (upgrades proxy)")
c.Flags().StringVarP(&p.virtualHosts, "virtual-hosts", "", "default,secure",
"override proxy virtualHosts")
c.Flags().StringVarP(&p.Namespace, "namespace", "n", "apigee",
"emit configuration in the specified namespace")
c.Flags().IntVarP(&p.rotate, "rotate", "", 0, "if n > 0, generate new private key and keep n public keys (hybrid only)")
return c
}
func (p *provision) run(printf shared.FormatFn) error {
var cred *keySecret
var verbosef = shared.NoPrintf
if p.Verbose {
verbosef = shared.Errorf
}
tempDir, err := os.MkdirTemp("", "apigee")
if err != nil {
return errors.Wrap(err, "creating temp dir")
}
defer os.RemoveAll(tempDir)
replaceVH := func(proxyDir string) error {
proxiesFile := filepath.Join(proxyDir, "proxies", "default.xml")
bytes, err := os.ReadFile(proxiesFile)
if err != nil {
return errors.Wrapf(err, "reading file %s", proxiesFile)
}
newVH := ""
for _, vh := range strings.Split(p.virtualHosts, ",") {
if strings.TrimSpace(vh) != "" {
newVH = newVH + fmt.Sprintf(virtualHostReplacementFmt, vh)
}
}
// remove all "secure" virtualhost
bytes = []byte(strings.ReplaceAll(string(bytes), virtualHostDeleteText, ""))
// replace the "default" virtualhost
bytes = []byte(strings.Replace(string(bytes), virtualHostReplaceText, newVH, 1))
if err := os.WriteFile(proxiesFile, bytes, 0); err != nil {
return errors.Wrapf(err, "writing file %s", proxiesFile)
}
return nil
}
replaceInFile := func(file, old, new string) error {
bytes, err := os.ReadFile(file)
if err != nil {
return errors.Wrapf(err, "reading file %s", file)
}
bytes = []byte(strings.Replace(string(bytes), old, new, 1))
if err := os.WriteFile(file, bytes, 0); err != nil {
return errors.Wrapf(err, "writing file %s", file)
}
return nil
}
// replace the version using the build info
replaceVersion := func(proxyDir string) error {
calloutFile := filepath.Join(proxyDir, "policies", "Send-Version.xml")
oldValue := `"version":"{{version}}"`
newValue := fmt.Sprintf(`"version":"%s"`, shared.BuildInfo.Version)
if err := replaceInFile(calloutFile, oldValue, newValue); err != nil {
return err
}
return nil
}
replaceVHAndAuthTarget := func(proxyDir string) error {
if err := replaceVH(proxyDir); err != nil {
return err
}
if err := replaceVersion(proxyDir); err != nil {
return err
}
if p.IsOPDK {
// OPDK must target local internal proxy
authFile := filepath.Join(proxyDir, "policies", "Authenticate-Call.xml")
oldTarget := "https://edgemicroservices.apigee.net"
newTarget := p.RuntimeBase
if err := replaceInFile(authFile, oldTarget, newTarget); err != nil {
return err
}
// OPDK must have org.noncps = true for products callout
calloutFile := filepath.Join(proxyDir, "policies", "JavaCallout.xml")
oldValue := "</Properties>"
newValue := `<Property name="org.noncps">true</Property>
</Properties>`
if err := replaceInFile(calloutFile, oldValue, newValue); err != nil {
return err
}
}
return nil
}
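	// The bundled proxy templates are patched on disk before import: virtual
	// hosts, the reported version, and (for OPDK) the auth target and CPS
	// flag all depend on the local runtime.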
if p.IsOPDK {
if err := p.deployInternalProxy(replaceVH, tempDir, verbosef); err != nil {
return errors.Wrap(err, "deploying internal proxy")
}
}
// deploy remote-service proxy
var customizedProxy string
if p.IsGCPManaged {
customizedProxy, err = getCustomizedProxy(tempDir, remoteServiceProxy, replaceVersion)
} else {
customizedProxy, err = getCustomizedProxy(tempDir, legacyServiceProxy, replaceVHAndAuthTarget)
}
if err != nil {
return err
}
if err := p.checkAndDeployProxy(authProxyName, customizedProxy, p.forceProxyInstall, verbosef); err != nil {
return errors.Wrapf(err, "deploying runtime proxy %s", authProxyName)
}
// Deploy remote-token proxy
if p.IsGCPManaged {
customizedProxy, err = getCustomizedProxy(tempDir, remoteTokenProxy, nil)
} else {
customizedProxy, err = getCustomizedProxy(tempDir, legacyTokenProxy, replaceVH)
}
if err != nil {
return err
}
if err := p.checkAndDeployProxy(tokenProxyName, customizedProxy, false, verbosef); err != nil {
return errors.Wrapf(err, "deploying token proxy %s", tokenProxyName)
}
if !p.IsGCPManaged {
cred, err = p.createLegacyCredential(verbosef) // TODO: on missing or force new cred
if err != nil {
return errors.Wrapf(err, "generating credential")
}
if err := p.getOrCreateKVM(cred, verbosef); err != nil {
return errors.Wrapf(err, "retrieving or creating kvm")
}
}
config := p.ServerConfig
if config == nil {
config = p.createConfig(cred)
}
if p.IsGCPManaged && (config.Tenant.PrivateKey == nil || p.rotate > 0) {
var keyID string
var privateKey *rsa.PrivateKey
var jwks jwk.Set
var err error
if p.isCloud() { // attempt to fetch secrets from propertysets
keyID, privateKey, jwks, err = p.policySecretsFromPropertyset()
}
if err != nil || privateKey == nil {
verbosef("no existing policy secret, creating new ones")
keyID, privateKey, jwks, err = p.CreateNewKey()
if err != nil {
return err
}
}
config.Tenant.PrivateKey = privateKey
config.Tenant.PrivateKeyID = keyID
if jwks, err = p.RotateJWKS(jwks, p.rotate); err != nil {
return err
}
config.Tenant.JWKS = jwks
}
var verifyErrors error
if p.IsGCPManaged {
verifyErrors = p.verifyWithRetry(config, verbosef)
		// creates the policy secrets if it is GCP managed
if err := p.createPolicySecretData(config, verbosef); err != nil {
return errors.Wrapf(err, "creating policy secret data")
}
		// creates the analytics secrets if it is GCP managed
if err := p.createAnalyticsSecretData(config, verbosef); err != nil {
return errors.Wrapf(err, "creating analytics secret data")
}
if len(p.analyticsSecretData) == 0 {
shared.Errorf("\nWARNING: No analytics service account given via --analytics-sa or config.yaml.")
shared.Errorf("\nIMPORTANT: Please make sure the application default credentials where the adapter is run are correctly configured.")
}
} else {
verifyErrors = p.verifyWithoutRetry(config, verbosef)
}
if err := p.printConfig(config, printf, verifyErrors, verbosef); err != nil {
return errors.Wrapf(err, "generating config")
}
if verifyErrors == nil |
// return possible errors if not hybrid
if !p.IsGCPManaged || p.isCloud() {
return verifyErrors
}
// output this warning for hybrid
if p.rotate > 0 {
shared.Errorf("\nIMPORTANT: Provisioned config | {
verbosef("provisioning verified OK")
} | conditional_block |
bighosts.py |
# | ([0-9a-fA-F]{1,4}){0,7}::?([0-9a-fA-F]{1,4}) # ipv6
# ''',re.VERBOSE)
def __init__(self, a, h):
        '''The address parameter is checked against the format
        regexp. self.hostnames holds the hostname column of the entry,
        normally a whitespace separated string of names.'''
if self.format.match(a): self.address=a
else: raise "Address format error : %s" % a
self.hostnames = h
def __repr__(self, col1width=3):
|
class Hosts(dict):
    '''This class features two dictionaries, addresses and hostnames,
    pointing to Address instances. The dict accessors are overridden
    so the object behaves as a dictionary which can be accessed
    both ways, depending on whether the parameter matches the address
    format'''
#
#
__address=Address.format
line=re.compile(
'''(^
(?P<info>
(?P<address> (\d{1,3}\.){3}\d{1,3})
(?P<sep> \s+)
(?P<hostnames> .+) )
(?P<optcmt> \s+\#.*)?
$)|(^
.*
$)''', re.VERBOSE)
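    # The first alternative captures 'address whitespace hostnames' plus an
    # optional trailing comment; the second matches any other line, so that
    # match() always succeeds and group('address') is None for non-entries.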
    def __iter__(self):
        # iterate over the addresses, the way a dict iterates over its keys
        return iter(self.addresses)
def __init__(self, defaults=None, **kwargs):
        # It would be nice to build the dict straight from another dict
        self.addresses={}
        self.hostnames={}
        # 'defaults' is either a filename to parse or a dict of initial entries
        self.filename = defaults if type(defaults)==str else None
        if type(defaults)==str:
            self.read(defaults)
        elif type(defaults)==dict:
            for k,v in defaults.items(): self[k]=v
if kwargs:
for k,v in kwargs.items(): self[k]=v
def __repr__(self):
'''Represents itself as a common /etc/hosts file'''
# defaults ordering anyone? localhost at the top..
# especially useful for the ifcfg files device at the top and ipaddr right then
lines = map( repr, self.addresses.values())
return "".join( [ l for l in lines if l.find('~del~')==-1 ] )
def __getitem__(self,item):
'''If the item is an address, returns the hostnames, else return
the address.'''
if self.__address.match(item):
return self.addresses[item].hostnames
else:
return self.hostnames[item].address
def __delitem__(self,item):
'''Removes a whole line 'address - hostnames'. Can be called
by address or hostname'''
if self.__address.match(item):
a, h = item, self.addresses[item].hostnames
else:
a, h = self.hostnames[item].address, self.hostnames[item].hostnames
# The references in both indexes are del. The Address
# instance is deleted by the python garbage collector
del self.addresses[a]
for i in h.split(): del self.hostnames[i]
def __setitem__(self, item, value):
        '''Handles the various cases: new entry, address change, rename.
        Usage is symmetric, e.g. h['10.0.0.1']='mc' or h['mc']='10.0.0.1'.'''
if self.__address.match(item): a, h = item, value
else: h, a = item, value
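        # '~del~' is a sentinel hostname marking an entry as deleted:
        # __repr__ skips such entries and write() drops their lines.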
if not a in self.addresses and ( not h in self.hostnames or h=='~del~' ):
# New address and new hostname
# Create an instance, and 2 references
new=Address(a,h)
self.addresses[a] = new
self.hostnames[h] = new
elif h in self.hostnames and not a in self.addresses:
# Modifying the address of an existing hostname:
# deletion of the old ref in self.addresses
# new entry in self.addresses
# modification of the address attribute in the instance
del self.addresses[self[h]]
self.addresses[a] = self.hostnames[h]
self.hostnames[h].address = a
elif ( h=='~del~' or not h in self.hostnames ) and a in self.addresses:
# Renaming an address
# deletion of the old entries in hostnames
# new entry in self.hostnames
# reset of the hostnames attribute in the instance
            for i in self[a].split(): del self.hostnames[i]
self.hostnames[h] = self.addresses[a]
self.addresses[a].hostnames = h
elif h in self.hostnames and a in self.addresses and self[h]!=a:
# Do we want to keep old references and alias: no
del self[a]
new=Address(a,h)
self.addresses[a] = new
self.hostnames[h] = new
def reprline(self,item):
if self.__address.match(item):
return repr(self.addresses[item])
else:
return repr(self.hostnames[item])
    def append(self,item,value):
        '''Attach an extra hostname (alias) to an existing address.'''
        if self.__address.match(item): a, h = item, value
        else: h, a = item, value
        self.hostnames[h]=self.addresses[a]
        # hostnames are stored as a whitespace separated string, not a list
        self.hostnames[h].hostnames += ' ' + h
def read(self,filenames):
"""Read and parse a filename or a list of filenames.
Files that cannot be opened are silently ignored; this is
designed so that you can specify a list of potential
configuration file locations (e.g. current directory, user's
home directory, systemwide directory), and all existing
configuration files in the list will be read. A single
filename may also be given.
Return list of successfully read files.
"""
if isinstance(filenames, basestring):
filenames = [filenames]
read_ok = []
for filename in filenames:
try:
fp = open(filename)
except IOError:
continue
self._read(fp)
fp.close()
read_ok.append(filename)
return read_ok
def _read(self, fp):
'''The file parameter can be a filename or a file
descriptor. The file should conform to the common hosts format'''
for l in fp:
a, h = self.line.match( l).group('address', 'hostnames')
if a:
                self[a]=h                 # one Address holding the whole hostname column
                for i in h.split():
                    # index each individual name so lookups by alias work too
                    self.hostnames[i] = self.addresses[a]
def write(self,filename=None):
filename = filename or self.filename
c=Hosts() # Because we pop the written lines, we pop on a copy
c.addresses, c.hostnames = self.addresses.copy(), self.hostnames.copy()
f=file(filename+'~','w')
if os.path.isfile(filename):
for l in file(filename):
a, h = c.line.match(l).group('address', 'hostnames')
if a:
                    for i in h.split()+[a]:
                        if i in c.hostnames or i in c.addresses:
                            if c[i].find('~del~')==-1:
                                # known entry: rewrite the line with its current values
                                f.write( self.line.sub( c.reprline(i)[:-1], l ))
                            # entries renamed to '~del~' are dropped on write
                            del c[i]
                            break
                    else:
                        # the line mentions no known entry: keep it verbatim
                        f.write(l)
else:
f.write(l)
# else:
# if a in c.addresses:
# if c[a][0]!='~del~':
# f.write( self.__address.sub( repr(c.addresses[a])[:-1], l ))
# del c[a]
# else:
# f.write(l)
# if a and h:
# i=[ i for i in h.split(), a if i in c and c[i]!='~del~'].pop():
# if i:
# f.write( self.line.sub( repr(c.hostnames[i])[:-1], l ))
# del c[i]
# else: f.write(l)
# else: f.write(l)
f.write(repr(c))
f.close()
os.rename(filename+'~',filename)
if __name__=="__main__":
    # TODO: make write() use the dict interface directly
    from os import getenv
    # Exercise the different constructors (input file, kwargs and dict)
h=Hosts()
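    # Hosts() starts empty; Hosts('/etc/hosts') would parse that file and
    # Hosts({'127.0.0.1': 'localhost'}) would seed entries from a dict.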
h['1.0.0.1']='mc'
print h['1.0.0.1']
print h['mc']
del h['mc']
    print 'mc' in h.hostnames               # False: the entry is gone
h['mc']='1.0.0.1'
print h['1.0.0.1']
print h['mc']
del h['1.0.0.1']
print h[' | '''Common /etc/hosts start the hostname columns at the 24th
character and separate the two columns by as much tabs as
needed'''
sep="\t"*((col1width*8-1-len(self.address))/8+1)
return repr("%s%s%s\n" % ( self.address, sep, self.hostnames )) | identifier_body |
bighosts.py |
# | ([0-9a-fA-F]{1,4}){0,7}::?([0-9a-fA-F]{1,4}) # ipv6
# ''',re.VERBOSE)
def __init__(self, a, h):
        '''The address parameter is checked against the format
        regexp. self.hostnames holds the hostname column of the entry,
        normally a whitespace separated string of names.'''
if self.format.match(a): self.address=a
else: raise "Address format error : %s" % a
self.hostnames = h
def __repr__(self, col1width=3):
'''Common /etc/hosts start the hostname columns at the 24th
character and separate the two columns by as much tabs as
needed'''
sep="\t"*((col1width*8-1-len(self.address))/8+1)
return repr("%s%s%s\n" % ( self.address, sep, self.hostnames ))
class Hosts(dict):
    '''This class features two dictionaries, addresses and hostnames,
    pointing to Address instances. The dict accessors are overridden
    so the object behaves as a dictionary which can be accessed
    both ways, depending on whether the parameter matches the address
    format'''
#
#
__address=Address.format
line=re.compile(
'''(^
(?P<info>
(?P<address> (\d{1,3}\.){3}\d{1,3})
(?P<sep> \s+)
(?P<hostnames> .+) )
(?P<optcmt> \s+\#.*)?
$)|(^
.*
$)''', re.VERBOSE)
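    # The first alternative captures 'address whitespace hostnames' plus an
    # optional trailing comment; the second matches any other line, so that
    # match() always succeeds and group('address') is None for non-entries.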
    def __iter__(self):
        # iterate over the addresses, the way a dict iterates over its keys
        return iter(self.addresses)
def __init__(self, defaults=None, **kwargs):
        # It would be nice to build the dict straight from another dict
        self.addresses={}
        self.hostnames={}
        # 'defaults' is either a filename to parse or a dict of initial entries
        self.filename = defaults if type(defaults)==str else None
        if type(defaults)==str:
            self.read(defaults)
        elif type(defaults)==dict:
            for k,v in defaults.items(): self[k]=v
if kwargs:
for k,v in kwargs.items(): self[k]=v
def __repr__(self):
'''Represents itself as a common /etc/hosts file'''
# defaults ordering anyone? localhost at the top..
# especially useful for the ifcfg files device at the top and ipaddr right then
lines = map( repr, self.addresses.values())
return "".join( [ l for l in lines if l.find('~del~')==-1 ] )
def __getitem__(self,item):
'''If the item is an address, returns the hostnames, else return
the address.'''
if self.__address.match(item):
return self.addresses[item].hostnames
else:
return self.hostnames[item].address
def __delitem__(self,item):
'''Removes a whole line 'address - hostnames'. Can be called
by address or hostname'''
if self.__address.match(item):
a, h = item, self.addresses[item].hostnames
else:
a, h = self.hostnames[item].address, self.hostnames[item].hostnames
# The references in both indexes are del. The Address
# instance is deleted by the python garbage collector
del self.addresses[a]
for i in h.split(): del self.hostnames[i]
def __setitem__(self, item, value):
        '''Handles the various cases: new entry, address change, rename.
        Usage is symmetric, e.g. h['10.0.0.1']='mc' or h['mc']='10.0.0.1'.'''
if self.__address.match(item): |
else: h, a = item, value
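        # '~del~' is a sentinel hostname marking an entry as deleted:
        # __repr__ skips such entries and write() drops their lines.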
if not a in self.addresses and ( not h in self.hostnames or h=='~del~' ):
# New address and new hostname
# Create an instance, and 2 references
new=Address(a,h)
self.addresses[a] = new
self.hostnames[h] = new
elif h in self.hostnames and not a in self.addresses:
# Modifying the address of an existing hostname:
# deletion of the old ref in self.addresses
# new entry in self.addresses
# modification of the address attribute in the instance
del self.addresses[self[h]]
self.addresses[a] = self.hostnames[h]
self.hostnames[h].address = a
elif ( h=='~del~' or not h in self.hostnames ) and a in self.addresses:
# Renaming an address
# deletion of the old entries in hostnames
# new entry in self.hostnames
# reset of the hostnames attribute in the instance
            for i in self[a].split(): del self.hostnames[i]
self.hostnames[h] = self.addresses[a]
self.addresses[a].hostnames = h
elif h in self.hostnames and a in self.addresses and self[h]!=a:
# Do we want to keep old references and alias: no
del self[a]
new=Address(a,h)
self.addresses[a] = new
self.hostnames[h] = new
def reprline(self,item):
if self.__address.match(item):
return repr(self.addresses[item])
else:
return repr(self.hostnames[item])
    def append(self,item,value):
        '''Attach an extra hostname (alias) to an existing address.'''
        if self.__address.match(item): a, h = item, value
        else: h, a = item, value
        self.hostnames[h]=self.addresses[a]
        # hostnames are stored as a whitespace separated string, not a list
        self.hostnames[h].hostnames += ' ' + h
def read(self,filenames):
"""Read and parse a filename or a list of filenames.
Files that cannot be opened are silently ignored; this is
designed so that you can specify a list of potential
configuration file locations (e.g. current directory, user's
home directory, systemwide directory), and all existing
configuration files in the list will be read. A single
filename may also be given.
Return list of successfully read files.
"""
if isinstance(filenames, basestring):
filenames = [filenames]
read_ok = []
for filename in filenames:
try:
fp = open(filename)
except IOError:
continue
self._read(fp)
fp.close()
read_ok.append(filename)
return read_ok
def _read(self, fp):
'''The file parameter can be a filename or a file
descriptor. The file should conform to the common hosts format'''
for l in fp:
a, h = self.line.match( l).group('address', 'hostnames')
if a:
                self[a]=h                 # one Address holding the whole hostname column
                for i in h.split():
                    # index each individual name so lookups by alias work too
                    self.hostnames[i] = self.addresses[a]
def write(self,filename=None):
filename = filename or self.filename
c=Hosts() # Because we pop the written lines, we pop on a copy
c.addresses, c.hostnames = self.addresses.copy(), self.hostnames.copy()
f=file(filename+'~','w')
if os.path.isfile(filename):
for l in file(filename):
a, h = c.line.match(l).group('address', 'hostnames')
if a:
                    for i in h.split()+[a]:
                        if i in c.hostnames or i in c.addresses:
                            if c[i].find('~del~')==-1:
                                # known entry: rewrite the line with its current values
                                f.write( self.line.sub( c.reprline(i)[:-1], l ))
                            # entries renamed to '~del~' are dropped on write
                            del c[i]
                            break
                    else:
                        # the line mentions no known entry: keep it verbatim
                        f.write(l)
else:
f.write(l)
# else:
# if a in c.addresses:
# if c[a][0]!='~del~':
# f.write( self.__address.sub( repr(c.addresses[a])[:-1], l ))
# del c[a]
# else:
# f.write(l)
# if a and h:
# i=[ i for i in h.split(), a if i in c and c[i]!='~del~'].pop():
# if i:
# f.write( self.line.sub( repr(c.hostnames[i])[:-1], l ))
# del c[i]
# else: f.write(l)
# else: f.write(l)
f.write(repr(c))
f.close()
os.rename(filename+'~',filename)
if __name__=="__main__":
    # TODO: make write() use the dict interface directly
    from os import getenv
    # Exercise the different constructors (input file, kwargs and dict)
h=Hosts()
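    # Hosts() starts empty; Hosts('/etc/hosts') would parse that file and
    # Hosts({'127.0.0.1': 'localhost'}) would seed entries from a dict.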
h['1.0.0.1']='mc'
print h['1.0.0.1']
print h['mc']
del h['mc']
    print 'mc' in h.hostnames               # False: the entry is gone
h['mc']='1.0.0.1'
print h['1.0.0.1']
print h['mc']
del h['1.0.0.1']
print h['mc | a, h = item, value | conditional_block |
bighosts.py |
# | ([0-9a-fA-F]{1,4}){0,7}::?([0-9a-fA-F]{1,4}) # ipv6
# ''',re.VERBOSE)
def __init__(self, a, h):
        '''The address parameter is checked against the format
        regexp. self.hostnames holds the hostname column of the entry,
        normally a whitespace separated string of names.'''
if self.format.match(a): self.address=a
else: raise "Address format error : %s" % a
self.hostnames = h
def __repr__(self, col1width=3):
'''Common /etc/hosts start the hostname columns at the 24th
character and separate the two columns by as much tabs as
needed'''
sep="\t"*((col1width*8-1-len(self.address))/8+1)
return repr("%s%s%s\n" % ( self.address, sep, self.hostnames ))
class Hosts(dict):
    '''This class features two dictionaries, addresses and hostnames,
    pointing to Address instances. The dict accessors are overridden
    so the object behaves as a dictionary which can be accessed
    both ways, depending on whether the parameter matches the address
    format'''
#
#
__address=Address.format
line=re.compile(
'''(^
(?P<info>
(?P<address> (\d{1,3}\.){3}\d{1,3})
(?P<sep> \s+)
(?P<hostnames> .+) )
(?P<optcmt> \s+\#.*)?
$)|(^
.*
$)''', re.VERBOSE)
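    # The first alternative captures 'address whitespace hostnames' plus an
    # optional trailing comment; the second matches any other line, so that
    # match() always succeeds and group('address') is None for non-entries.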
    def __iter__(self):
        # iterate over the addresses, the way a dict iterates over its keys
        return iter(self.addresses)
def __init__(self, defaults=None, **kwargs):
        # It would be nice to build the dict straight from another dict
        self.addresses={}
        self.hostnames={}
        # 'defaults' is either a filename to parse or a dict of initial entries
        self.filename = defaults if type(defaults)==str else None
        if type(defaults)==str:
            self.read(defaults)
        elif type(defaults)==dict:
            for k,v in defaults.items(): self[k]=v
if kwargs:
for k,v in kwargs.items(): self[k]=v
def __repr__(self):
'''Represents itself as a common /etc/hosts file'''
# defaults ordering anyone? localhost at the top..
# especially useful for the ifcfg files device at the top and ipaddr right then
lines = map( repr, self.addresses.values())
return "".join( [ l for l in lines if l.find('~del~')==-1 ] )
def __getitem__(self,item):
'''If the item is an address, returns the hostnames, else return
the address.'''
if self.__address.match(item):
return self.addresses[item].hostnames
else:
return self.hostnames[item].address
def __delitem__(self,item):
'''Removes a whole line 'address - hostnames'. Can be called
by address or hostname'''
if self.__address.match(item):
a, h = item, self.addresses[item].hostnames
else:
a, h = self.hostnames[item].address, self.hostnames[item].hostnames
# The references in both indexes are del. The Address
# instance is deleted by the python garbage collector
del self.addresses[a]
for i in h.split(): del self.hostnames[i]
def __setitem__(self, item, value):
        '''Handles the various cases: new entry, address change, rename.
        Usage is symmetric, e.g. h['10.0.0.1']='mc' or h['mc']='10.0.0.1'.'''
if self.__address.match(item): a, h = item, value
else: h, a = item, value
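        # '~del~' is a sentinel hostname marking an entry as deleted:
        # __repr__ skips such entries and write() drops their lines.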
if not a in self.addresses and ( not h in self.hostnames or h=='~del~' ):
# New address and new hostname
# Create an instance, and 2 references
new=Address(a,h)
self.addresses[a] = new
self.hostnames[h] = new
elif h in self.hostnames and not a in self.addresses:
# Modifying the address of an existing hostname:
# deletion of the old ref in self.addresses
# new entry in self.addresses
# modification of the address attribute in the instance
del self.addresses[self[h]]
self.addresses[a] = self.hostnames[h]
self.hostnames[h].address = a
elif ( h=='~del~' or not h in self.hostnames ) and a in self.addresses:
# Renaming an address
# deletion of the old entries in hostnames
# new entry in self.hostnames
# reset of the hostnames attribute in the instance
            for i in self[a].split(): del self.hostnames[i]
self.hostnames[h] = self.addresses[a]
self.addresses[a].hostnames = h
elif h in self.hostnames and a in self.addresses and self[h]!=a:
# Do we want to keep old references and alias: no
del self[a]
new=Address(a,h)
self.addresses[a] = new
self.hostnames[h] = new
def reprline(self,item):
if self.__address.match(item):
return repr(self.addresses[item])
else:
return repr(self.hostnames[item])
    def append(self,item,value):
        '''Attach an extra hostname (alias) to an existing address.'''
        if self.__address.match(item): a, h = item, value
        else: h, a = item, value
        self.hostnames[h]=self.addresses[a]
        # hostnames are stored as a whitespace separated string, not a list
        self.hostnames[h].hostnames += ' ' + h
def read(self,filenames):
"""Read and parse a filename or a list of filenames.
Files that cannot be opened are silently ignored; this is
designed so that you can specify a list of potential
configuration file locations (e.g. current directory, user's
home directory, systemwide directory), and all existing
configuration files in the list will be read. A single
filename may also be given.
Return list of successfully read files.
"""
if isinstance(filenames, basestring):
filenames = [filenames]
read_ok = []
for filename in filenames:
try:
fp = open(filename)
except IOError:
continue
self._read(fp)
fp.close()
read_ok.append(filename)
return read_ok
def _read(self, fp):
'''The file parameter can be a filename or a file
descriptor. The file should conform to the common hosts format'''
for l in fp:
a, h = self.line.match( l).group('address', 'hostnames')
if a:
                self[a]=h                 # one Address holding the whole hostname column
                for i in h.split():
                    # index each individual name so lookups by alias work too
                    self.hostnames[i] = self.addresses[a]
def write(self,filename=None):
filename = filename or self.filename
c=Hosts() # Because we pop the written lines, we pop on a copy
c.addresses, c.hostnames = self.addresses.copy(), self.hostnames.copy()
f=file(filename+'~','w')
if os.path.isfile(filename):
for l in file(filename):
a, h = c.line.match(l).group('address', 'hostnames')
if a:
                    for i in h.split()+[a]:
                        if i in c.hostnames or i in c.addresses:
                            if c[i].find('~del~')==-1:
                                # known entry: rewrite the line with its current values
                                f.write( self.line.sub( c.reprline(i)[:-1], l ))
                            # entries renamed to '~del~' are dropped on write
                            del c[i]
                            break
                    else:
                        # the line mentions no known entry: keep it verbatim
                        f.write(l)
else:
f.write(l)
# else: |
# else:
# f.write(l)
# if a and h:
# i=[ i for i in h.split(), a if i in c and c[i]!='~del~'].pop():
# if i:
# f.write( self.line.sub( repr(c.hostnames[i])[:-1], l ))
# del c[i]
# else: f.write(l)
# else: f.write(l)
f.write(repr(c))
f.close()
os.rename(filename+'~',filename)
if __name__=="__main__":
    # TODO: make write() use the dict interface directly
    from os import getenv
    # Exercise the different constructors (input file, kwargs and dict)
h=Hosts()
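    # Hosts() starts empty; Hosts('/etc/hosts') would parse that file and
    # Hosts({'127.0.0.1': 'localhost'}) would seed entries from a dict.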
h['1.0.0.1']='mc'
print h['1.0.0.1']
print h['mc']
del h['mc']
    print 'mc' in h.hostnames               # False: the entry is gone
h['mc']='1.0.0.1'
print h['1.0.0.1']
print h['mc']
del h['1.0.0.1']
print h[' | # if a in c.addresses:
# if c[a][0]!='~del~':
# f.write( self.__address.sub( repr(c.addresses[a])[:-1], l ))
# del c[a] | random_line_split |
bighosts.py |
# | ([0-9a-fA-F]{1,4}){0,7}::?([0-9a-fA-F]{1,4}) # ipv6
# ''',re.VERBOSE)
def __init__(self, a, h):
        '''The address parameter is checked against the format
        regexp. self.hostnames holds the hostname column of the entry,
        normally a whitespace separated string of names.'''
if self.format.match(a): self.address=a
else: raise "Address format error : %s" % a
self.hostnames = h
def __repr__(self, col1width=3):
        '''Common /etc/hosts files start the hostname column at the 24th
        character and separate the two columns with as many tabs as
        needed'''
sep="\t"*((col1width*8-1-len(self.address))/8+1)
return repr("%s%s%s\n" % ( self.address, sep, self.hostnames ))
class Hosts(dict):
    '''This class features two dictionaries, addresses and hostnames,
    pointing to Address instances. The dict accessors are overridden
    so the object appears as a dictionary which can be accessed
    both ways, depending on whether the parameter matches the address
    format'''
#
#
__address=Address.format
line=re.compile(
'''(^
(?P<info>
(?P<address> (\d{1,3}\.){3}\d{1,3})
(?P<sep> \s+)
(?P<hostnames> .+) )
(?P<optcmt> \s+\#.*)?
$)|(^
.*
$)''', re.VERBOSE)
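    # Editor's note: the greedy (?P<hostnames> .+) consumes to the end of the
    # line, so the optional comment group usually matches empty and trailing
    # '# ...' comments end up inside the hostnames field, e.g. the line
    #   '127.0.0.1  localhost  # loopback'
    # yields hostnames 'localhost  # loopback'.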
    def __iter__(self):
        '''Iterate over the stored addresses'''
        return iter(self.addresses)
    def __init__(self, defaults=None, **kwargs):
        # It would be nice to build the dict from a dict: defaults may be
        # a filename to parse or a dict of entries to copy
        self.addresses={}
        self.hostnames={}
        self.filename = defaults if type(defaults)==str else None
        if defaults and type(defaults)==str:
            self.read(defaults)
        elif defaults and type(defaults)==dict:
            for k,v in defaults.items(): self[k]=v
        if kwargs:
            for k,v in kwargs.items(): self[k]=v
def __repr__(self):
'''Represents itself as a common /etc/hosts file'''
# defaults ordering anyone? localhost at the top..
# especially useful for the ifcfg files device at the top and ipaddr right then
lines = map( repr, self.addresses.values())
return "".join( [ l for l in lines if l.find('~del~')==-1 ] )
def __getitem__(self,item):
'''If the item is an address, returns the hostnames, else return
the address.'''
if self.__address.match(item):
return self.addresses[item].hostnames
else:
return self.hostnames[item].address
def | (self,item):
'''Removes a whole line 'address - hostnames'. Can be called
by address or hostname'''
if self.__address.match(item):
a, h = item, self.addresses[item].hostnames
else:
a, h = self.hostnames[item].address, self.hostnames[item].hostnames
# The references in both indexes are del. The Address
# instance is deleted by the python garbage collector
del self.addresses[a]
for i in h.split(): del self.hostnames[i]
def __setitem__(self, item, value):
'''Various case : the address or host already exists. The host can be
a list of strings. Say 10 mc'''
if self.__address.match(item): a, h = item, value
else: h, a = item, value
if not a in self.addresses and ( not h in self.hostnames or h=='~del~' ):
# New address and new hostname
# Create an instance, and 2 references
new=Address(a,h)
self.addresses[a] = new
self.hostnames[h] = new
elif h in self.hostnames and not a in self.addresses:
# Modifying the address of an existing hostname:
# deletion of the old ref in self.addresses
# new entry in self.addresses
# modification of the address attribute in the instance
del self.addresses[self[h]]
self.addresses[a] = self.hostnames[h]
self.hostnames[h].address = a
elif ( h=='~del~' or not h in self.hostnames ) and a in self.addresses:
# Renaming an address
# deletion of the old entries in hostnames
# new entry in self.hostnames
# reset of the hostnames attribute in the instance
for i in self[a].split(' '): del self.hostnames[i]
self.hostnames[h] = self.addresses[a]
self.addresses[a].hostnames = h
elif h in self.hostnames and a in self.addresses and self[h]!=a:
# Do we want to keep old references and alias: no
del self[a]
new=Address(a,h)
self.addresses[a] = new
self.hostnames[h] = new
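    # Illustrative usage (editor's addition, assuming the classes above): the
    # mapping can be written and read from either side, and re-pointing one
    # side updates the other.
    #   h = Hosts()
    #   h['10.0.0.1'] = 'box'            # address -> hostname
    #   h['box']                         # -> '10.0.0.1'
    #   h['box'] = '10.0.0.2'            # moves the hostname to a new address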
def reprline(self,item):
if self.__address.match(item):
return repr(self.addresses[item])
else:
return repr(self.hostnames[item])
def append(self,item,value):
if self.__address.match(item): a, h = item, value
else: h, a = item, value
self.hostnames[h]=self.addresses[a]
self.hostnames[h].hostnames.append(h)
def read(self,filenames):
"""Read and parse a filename or a list of filenames.
Files that cannot be opened are silently ignored; this is
designed so that you can specify a list of potential
configuration file locations (e.g. current directory, user's
home directory, systemwide directory), and all existing
configuration files in the list will be read. A single
filename may also be given.
Return list of successfully read files.
"""
if isinstance(filenames, basestring):
filenames = [filenames]
read_ok = []
for filename in filenames:
try:
fp = open(filename)
except IOError:
continue
self._read(fp)
fp.close()
read_ok.append(filename)
return read_ok
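        # Illustrative call (editor's sketch): later files win on conflicting
        # entries, so list the most specific location last, e.g.
        #   h.read(['/etc/hosts', os.path.expanduser('~/.hosts')])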
def _read(self, fp):
'''The file parameter can be a filename or a file
descriptor. The file should conform to the common hosts format'''
for l in fp:
a, h = self.line.match( l).group('address', 'hostnames')
if a:
                for i in h.split(): self[i]=a
def write(self,filename=None):
filename = filename or self.filename
c=Hosts() # Because we pop the written lines, we pop on a copy
c.addresses, c.hostnames = self.addresses.copy(), self.hostnames.copy()
f=file(filename+'~','w')
if os.path.isfile(filename):
for l in file(filename):
a, h = c.line.match(l).group('address', 'hostnames')
if a:
for i in h.split()+[a]:
if i in c.hostnames.keys() + c.addresses.keys() and c[i].find('~del~')!=-1:
f.write( self.line.sub( c.reprline(i)[:-1], l ))
del c[i]
break
else:
f.write(l)
else:
f.write(l)
# else:
# if a in c.addresses:
# if c[a][0]!='~del~':
# f.write( self.__address.sub( repr(c.addresses[a])[:-1], l ))
# del c[a]
# else:
# f.write(l)
# if a and h:
# i=[ i for i in h.split(), a if i in c and c[i]!='~del~'].pop():
# if i:
# f.write( self.line.sub( repr(c.hostnames[i])[:-1], l ))
# del c[i]
# else: f.write(l)
# else: f.write(l)
f.write(repr(c))
f.close()
os.rename(filename+'~',filename)
if __name__=="__main__":
    # The idea is to get the iterator to work and make write() use the dict
from os import getenv
# Parse /etc/hosts and its different constructors (input file, kwargs and dict)
h=Hosts()
h['1.0.0.1']='mc'
print h['1.0.0.1']
print h['mc']
del h['mc']
print h['mc']
h['mc']='1.0.0.1'
print h['1.0.0.1']
print h['mc']
del h['1.0.0.1']
print h[' | __delitem__ | identifier_name |
ReturnRefundFooter.js | import HeaderSearchComponent from './HeaderSearchComponent'
import PickerLocation from './PickerLocation'
class ReturnRefundFooter extends Component {
constructor () {
super()
this.state = {
selectedAddress: 'Tambah alamat penjemputan',
addressName: '',
name: '',
phone: '',
postalCode: '',
fullAddress: '',
selectedProvince: '',
selectedCity: '',
selectedKecamatan: '',
provinceName: '',
cityName: '',
kecamatanName: '',
kecamatanCode: '',
countryId: '102',
addressId: 0
}
}
setAddress = (addressData) => {
this.setState({
selectedAddress: addressData.address_name,
addressId: addressData.address_id,
addressName: (addressData.address_name.toLowerCase() === 'tambah alamat penjemputan' && addressData.address_id === 0) ? '' : addressData.address_name,
name: `${addressData.first_name} ${!isEmpty(addressData.last_name) ? addressData.last_name : ''}`,
phone: addressData.phone,
postalCode: addressData.post_code,
fullAddress: addressData.full_address,
selectedProvince: addressData.province.province_id,
selectedCity: addressData.city.city_id,
selectedKecamatan: addressData.kecamatan.kecamatan_id,
provinceName: addressData.province.province_name,
cityName: addressData.city.city_name,
kecamatanName: addressData.kecamatan.kecamatan_name,
kecamatanCode: addressData.kecamatan.kecamatan_code
})
this.refs.child.setModal(false)
}
submitData = () => {
const { addressName, name, phone, postalCode, fullAddress, addressId, countryId, selectedProvince, provinceName, selectedCity, cityName, selectedKecamatan, kecamatanName, kecamatanCode } = this.state
let addressData = {
customer_id: this.props.customerId,
address_id: addressId,
address_name: addressName,
first_name: name.split(' ')[0],
last_name: name.substring(name.split(' ')[0].length, name.length),
phone,
full_address: fullAddress,
country_id: countryId,
province_id: selectedProvince,
province_name: provinceName,
city_id: selectedCity,
city_name: cityName,
kecamatan_id: selectedKecamatan,
kecamatan_code: kecamatanCode,
kecamatan_name: kecamatanName,
post_code: postalCode
}
const { invoices, orderDate, selectedItems, itemQty } = this.props
let listOfProducts = []
let listOfInvoices = []
selectedItems.forEach((data, index) => {
let shippingCost = {
shippingAmount: 0,
handlingFee: 0
}
itemQty.forEach((qtyData) => {
if (!isEmpty(invoices[data.invoiceIndex])) {
if (qtyData.salesOrderItemId === invoices[data.invoiceIndex].items[data.itemIndex].sales_order_item_id) {
shippingCost.shippingAmount += invoices[data.invoiceIndex].items[data.itemIndex].shipping_amount * qtyData.qty
shippingCost.handlingFee += invoices[data.invoiceIndex].items[data.itemIndex].handling_fee_adjust * qtyData.qty
}
}
})
let itemQtyIndex = itemQty.findIndex(qtyData => qtyData.salesOrderItemId === invoices[data.invoiceIndex].items[data.itemIndex].sales_order_item_id)
let qty = 1
if (itemQtyIndex >= 0) {
qty = itemQty[itemQtyIndex].qty
}
if (!isEmpty(invoices[data.invoiceIndex])) {
listOfProducts.push({
...invoices[data.invoiceIndex].items[data.itemIndex],
invoiceNo: invoices[data.invoiceIndex].invoice_no,
orderDate,
shipmentStatus: invoices[data.invoiceIndex].shipment_status,
qty
})
let invoiceItem = {
invoiceNo: invoices[data.invoiceIndex].invoice_no,
deliveryMethod: invoices[data.invoiceIndex].delivery_method,
shipmentStatus: invoices[data.invoiceIndex].shipment_status,
shippingAmount: shippingCost.shippingAmount,
handlingFee: shippingCost.handlingFee
}
if (isEmpty(find(listOfInvoices, invoiceItem))) {
listOfInvoices.push(invoiceItem)
}
}
})
this.props.setProductForReturn(listOfProducts, listOfInvoices, addressData)
this.props.navigation.navigate('ReturnRefundDetailPage')
}
setAddressData = (param, data) => this.setState({ [param]: data })
setShippingLocation = (selectedProvince, selectedCity, selectedKecamatan, province, city, kecamatan) => {
const { province_name: provinceName } = find(province, ['province_id', selectedProvince]) || {}
const { city_name: cityName } = find(city, ['city_id', selectedCity]) || {}
const { kecamatan_name: kecamatanName } = find(kecamatan, ['kecamatan_id', selectedKecamatan]) || {}
this.setState({
selectedProvince,
selectedCity,
selectedKecamatan,
cityName,
provinceName,
kecamatanName
})
this.submitData()
}
renderPicker = (address) => {
let extraAddressData = {
address_id: 0,
address_name: 'Tambah alamat penjemputan',
first_name: '',
province: {},
city: {},
kecamatan: {}
}
return (
<FlatList
data={address}
ListFooterComponent={() => (
<TouchableOpacity onPress={() => this.setAddress(extraAddressData)} style={{ marginHorizontal: 12, paddingVertical: 10, justifyContent: 'space-between', flexDirection: 'row', borderBottomColor: '#E0E6ED', borderBottomWidth: 1 }}>
<Text style={{ color: '#757885', fontFamily: 'Quicksand-Regular', fontSize: 16 }}>Tambah alamat penjemputan</Text>
</TouchableOpacity>
)}
renderItem={({ item }) => (
<TouchableOpacity onPress={() => this.setAddress(item)} style={{ marginHorizontal: 12, paddingVertical: 10, justifyContent: 'space-between', flexDirection: 'row', borderBottomColor: '#E0E6ED', borderBottomWidth: 1 }}>
<Text style={{ color: '#757885', fontFamily: 'Quicksand-Regular', fontSize: 16 }}>{item.address_name}</Text>
</TouchableOpacity>
)}
keyExtractor={(item, index) => `address item ${index}`}
/>
)
}
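  // Editor's note (illustration, not in the original source): extraAddressData
  // is a sentinel row with address_id 0; selecting it routes through setAddress
  // with an empty addressName, which re-opens the blank "Tambah alamat
  // penjemputan" (add pickup address) form in render().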
setParentState = (state) => {
    this.setState(state)
}
| () {
const { showData, totalItemSelected, address } = this.props
const { name, selectedAddress, addressName, phone, postalCode, fullAddress, selectedProvince, selectedCity, selectedKecamatan } = this.state
let obj = {
province: {
province_id: selectedProvince
},
city: {
city_id: selectedCity
},
kecamatan: {
kecamatan_id: selectedKecamatan
}
}
let success = true
const skippedStates = ['selectedProvince', 'selectedCity', 'selectedKecamatan', 'provinceName', 'cityName', 'kecamatanName', 'kecamatanCode', 'addressId']
for (let key of Object.keys(this.state)) {
if (isEmpty(this.state[key]) && !skippedStates.includes(key)) {
success = false
}
}
let ButtonComponent = (showData && success) ? ButtonFilledPrimary : ButtonFilledDisabled
let ButtonTextComponent = (showData && success) ? ButtonFilledText : ButtonFilledTextDisabled
return (
<Container>
{(showData) &&
<>
<FontSizeM>Alamat Penjemputan</FontSizeM>
<FormS>
<TouchableOpacity style={{ padding: 10, justifyContent: 'space-between', alignItems: 'center', flexDirection: 'row' }} onPress={() => this.refs.child.setModal(true)}>
<FontSizeM>{selectedAddress}</FontSizeM>
<Icon name='menu-down' size={24} />
</TouchableOpacity>
</FormS>
{selectedAddress.toLowerCase() === 'tambah alamat penjemputan'
? <>
<FontSizeM>Nama Alamat</FontSizeM>
<FormS>
<Input style={{ color: '#555761' }} value={addressName} onChangeText={(e) => this.setAddressData('addressName', e)} />
</FormS>
</>
: null
}
<FontSizeM>Nama Lengkap</FontSizeM>
<FormS>
<Input style={{ color: '#555761' }} value={name} onChangeText={(e) => this.setAddressData('name', e)} />
</FormS>
<FontSizeM>No telepon</FontSizeM>
<FormS>
<Input style={{ color: '#555761' }} keyboardType='numeric' value={phone} onChangeText={e => this.setAddressData('phone', e)} />
</FormS>
<FontSizeM>Kode Pos</FontSizeM>
<FormS>
<Input style={{ color: '#555761' }} keyboardType='numeric' maxLength={5} value={postalCode} onChangeText={e => this.setAddressData('postalCode', e)} />
</FormS>
<PickerLocation set | render | identifier_name |
ReturnRefundFooter.js | import HeaderSearchComponent from './HeaderSearchComponent'
import PickerLocation from './PickerLocation'
class ReturnRefundFooter extends Component {
constructor () {
super()
this.state = {
selectedAddress: 'Tambah alamat penjemputan',
addressName: '',
name: '',
phone: '',
postalCode: '',
fullAddress: '',
selectedProvince: '',
selectedCity: '',
selectedKecamatan: '',
provinceName: '',
cityName: '',
kecamatanName: '',
kecamatanCode: '',
countryId: '102',
addressId: 0
}
}
setAddress = (addressData) => {
this.setState({
selectedAddress: addressData.address_name,
addressId: addressData.address_id,
addressName: (addressData.address_name.toLowerCase() === 'tambah alamat penjemputan' && addressData.address_id === 0) ? '' : addressData.address_name,
name: `${addressData.first_name} ${!isEmpty(addressData.last_name) ? addressData.last_name : ''}`,
phone: addressData.phone,
postalCode: addressData.post_code,
fullAddress: addressData.full_address,
selectedProvince: addressData.province.province_id,
selectedCity: addressData.city.city_id,
selectedKecamatan: addressData.kecamatan.kecamatan_id,
provinceName: addressData.province.province_name,
cityName: addressData.city.city_name,
kecamatanName: addressData.kecamatan.kecamatan_name,
kecamatanCode: addressData.kecamatan.kecamatan_code
})
this.refs.child.setModal(false)
}
submitData = () => {
const { addressName, name, phone, postalCode, fullAddress, addressId, countryId, selectedProvince, provinceName, selectedCity, cityName, selectedKecamatan, kecamatanName, kecamatanCode } = this.state
let addressData = {
customer_id: this.props.customerId,
address_id: addressId,
address_name: addressName,
first_name: name.split(' ')[0],
last_name: name.substring(name.split(' ')[0].length, name.length),
phone,
full_address: fullAddress,
country_id: countryId,
province_id: selectedProvince,
province_name: provinceName,
city_id: selectedCity,
city_name: cityName,
kecamatan_id: selectedKecamatan,
kecamatan_code: kecamatanCode,
kecamatan_name: kecamatanName,
post_code: postalCode
}
const { invoices, orderDate, selectedItems, itemQty } = this.props
let listOfProducts = []
let listOfInvoices = []
selectedItems.forEach((data, index) => {
let shippingCost = {
shippingAmount: 0,
handlingFee: 0
}
itemQty.forEach((qtyData) => {
if (!isEmpty(invoices[data.invoiceIndex])) {
if (qtyData.salesOrderItemId === invoices[data.invoiceIndex].items[data.itemIndex].sales_order_item_id) {
shippingCost.shippingAmount += invoices[data.invoiceIndex].items[data.itemIndex].shipping_amount * qtyData.qty
shippingCost.handlingFee += invoices[data.invoiceIndex].items[data.itemIndex].handling_fee_adjust * qtyData.qty
}
}
})
let itemQtyIndex = itemQty.findIndex(qtyData => qtyData.salesOrderItemId === invoices[data.invoiceIndex].items[data.itemIndex].sales_order_item_id)
let qty = 1
if (itemQtyIndex >= 0) {
qty = itemQty[itemQtyIndex].qty
}
if (!isEmpty(invoices[data.invoiceIndex])) {
listOfProducts.push({
...invoices[data.invoiceIndex].items[data.itemIndex],
invoiceNo: invoices[data.invoiceIndex].invoice_no,
orderDate,
shipmentStatus: invoices[data.invoiceIndex].shipment_status,
qty
})
let invoiceItem = {
invoiceNo: invoices[data.invoiceIndex].invoice_no,
deliveryMethod: invoices[data.invoiceIndex].delivery_method,
shipmentStatus: invoices[data.invoiceIndex].shipment_status,
shippingAmount: shippingCost.shippingAmount,
handlingFee: shippingCost.handlingFee
}
if (isEmpty(find(listOfInvoices, invoiceItem))) {
listOfInvoices.push(invoiceItem)
}
}
})
this.props.setProductForReturn(listOfProducts, listOfInvoices, addressData)
this.props.navigation.navigate('ReturnRefundDetailPage')
}
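  // Editor's note (illustrative): the name is split on the first space only,
  // so name = 'Budi Santoso Putra' (a hypothetical value) yields
  // first_name 'Budi' and last_name ' Santoso Putra' (leading space kept).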
setAddressData = (param, data) => this.setState({ [param]: data })
setShippingLocation = (selectedProvince, selectedCity, selectedKecamatan, province, city, kecamatan) => {
const { province_name: provinceName } = find(province, ['province_id', selectedProvince]) || {}
const { city_name: cityName } = find(city, ['city_id', selectedCity]) || {}
const { kecamatan_name: kecamatanName } = find(kecamatan, ['kecamatan_id', selectedKecamatan]) || {}
this.setState({
selectedProvince,
selectedCity,
selectedKecamatan,
cityName,
provinceName,
kecamatanName
})
this.submitData()
}
renderPicker = (address) => {
let extraAddressData = {
address_id: 0,
address_name: 'Tambah alamat penjemputan',
first_name: '',
province: {},
city: {},
kecamatan: {}
}
return (
<FlatList
data={address}
ListFooterComponent={() => (
<TouchableOpacity onPress={() => this.setAddress(extraAddressData)} style={{ marginHorizontal: 12, paddingVertical: 10, justifyContent: 'space-between', flexDirection: 'row', borderBottomColor: '#E0E6ED', borderBottomWidth: 1 }}>
<Text style={{ color: '#757885', fontFamily: 'Quicksand-Regular', fontSize: 16 }}>Tambah alamat penjemputan</Text>
</TouchableOpacity>
)}
renderItem={({ item }) => (
<TouchableOpacity onPress={() => this.setAddress(item)} style={{ marginHorizontal: 12, paddingVertical: 10, justifyContent: 'space-between', flexDirection: 'row', borderBottomColor: '#E0E6ED', borderBottomWidth: 1 }}>
<Text style={{ color: '#757885', fontFamily: 'Quicksand-Regular', fontSize: 16 }}>{item.address_name}</Text>
</TouchableOpacity>
)}
keyExtractor={(item, index) => `address item ${index}`}
/>
)
}
setParentState = (state) => {
    this.setState(state)
}
render () {
const { showData, totalItemSelected, address } = this.props
const { name, selectedAddress, addressName, phone, postalCode, fullAddress, selectedProvince, selectedCity, selectedKecamatan } = this.state
let obj = {
province: {
province_id: selectedProvince
},
city: {
city_id: selectedCity
},
kecamatan: {
kecamatan_id: selectedKecamatan
}
}
let success = true
const skippedStates = ['selectedProvince', 'selectedCity', 'selectedKecamatan', 'provinceName', 'cityName', 'kecamatanName', 'kecamatanCode', 'addressId']
for (let key of Object.keys(this.state)) {
if (isEmpty(this.state[key]) && !skippedStates.includes(key)) {
success = false
}
}
let ButtonComponent = (showData && success) ? ButtonFilledPrimary : ButtonFilledDisabled
let ButtonTextComponent = (showData && success) ? ButtonFilledText : ButtonFilledTextDisabled
return (
<Container>
{(showData) &&
<>
<FontSizeM>Alamat Penjemputan</FontSizeM>
<FormS>
<TouchableOpacity style={{ padding: 10, justifyContent: 'space-between', alignItems: 'center', flexDirection: 'row' }} onPress={() => this.refs.child.setModal(true)}>
<FontSizeM>{selectedAddress}</FontSizeM>
<Icon name='menu-down' size={24} />
</TouchableOpacity>
</FormS>
{selectedAddress.toLowerCase() === 'tambah alamat penjemputan'
? <>
<FontSizeM>Nama Alamat</FontSizeM>
<FormS>
<Input style={{ color: '#555761' }} value={addressName} onChangeText={(e) => this.setAddressData('addressName', e)} />
</FormS>
</>
: null
}
<FontSizeM>Nama Lengkap</FontSizeM>
<FormS>
<Input style={{ color: '#555761' }} value={name} onChangeText={(e) => this.setAddressData('name', e)} />
</FormS>
<FontSizeM>No telepon</FontSizeM>
<FormS>
<Input style={{ color: '#555761' }} keyboardType='numeric' value={phone} onChangeText={e => this.setAddressData('phone', e)} />
</FormS>
<FontSizeM>Kode Pos</FontSizeM>
<FormS>
<Input style={{ color: '#555761' }} keyboardType='numeric' maxLength={5} value={postalCode} onChangeText={e => this.setAddressData('postalCode', e)} />
</FormS>
<PickerLocation set | random_line_split |
||
ReturnRefundFooter.js | HeaderSearchComponent from './HeaderSearchComponent'
import PickerLocation from './PickerLocation'
class ReturnRefundFooter extends Component {
constructor () {
super()
this.state = {
selectedAddress: 'Tambah alamat penjemputan',
addressName: '',
name: '',
phone: '',
postalCode: '',
fullAddress: '',
selectedProvince: '',
selectedCity: '',
selectedKecamatan: '',
provinceName: '',
cityName: '',
kecamatanName: '',
kecamatanCode: '',
countryId: '102',
addressId: 0
}
}
setAddress = (addressData) => {
this.setState({
selectedAddress: addressData.address_name,
addressId: addressData.address_id,
addressName: (addressData.address_name.toLowerCase() === 'tambah alamat penjemputan' && addressData.address_id === 0) ? '' : addressData.address_name,
name: `${addressData.first_name} ${!isEmpty(addressData.last_name) ? addressData.last_name : ''}`,
phone: addressData.phone,
postalCode: addressData.post_code,
fullAddress: addressData.full_address,
selectedProvince: addressData.province.province_id,
selectedCity: addressData.city.city_id,
selectedKecamatan: addressData.kecamatan.kecamatan_id,
provinceName: addressData.province.province_name,
cityName: addressData.city.city_name,
kecamatanName: addressData.kecamatan.kecamatan_name,
kecamatanCode: addressData.kecamatan.kecamatan_code
})
this.refs.child.setModal(false)
}
submitData = () => {
const { addressName, name, phone, postalCode, fullAddress, addressId, countryId, selectedProvince, provinceName, selectedCity, cityName, selectedKecamatan, kecamatanName, kecamatanCode } = this.state
let addressData = {
customer_id: this.props.customerId,
address_id: addressId,
address_name: addressName,
first_name: name.split(' ')[0],
last_name: name.substring(name.split(' ')[0].length, name.length),
phone,
full_address: fullAddress,
country_id: countryId,
province_id: selectedProvince,
province_name: provinceName,
city_id: selectedCity,
city_name: cityName,
kecamatan_id: selectedKecamatan,
kecamatan_code: kecamatanCode,
kecamatan_name: kecamatanName,
post_code: postalCode
}
const { invoices, orderDate, selectedItems, itemQty } = this.props
let listOfProducts = []
let listOfInvoices = []
selectedItems.forEach((data, index) => {
let shippingCost = {
shippingAmount: 0,
handlingFee: 0
}
itemQty.forEach((qtyData) => {
if (!isEmpty(invoices[data.invoiceIndex])) {
if (qtyData.salesOrderItemId === invoices[data.invoiceIndex].items[data.itemIndex].sales_order_item_id) |
}
})
let itemQtyIndex = itemQty.findIndex(qtyData => qtyData.salesOrderItemId === invoices[data.invoiceIndex].items[data.itemIndex].sales_order_item_id)
let qty = 1
if (itemQtyIndex >= 0) {
qty = itemQty[itemQtyIndex].qty
}
if (!isEmpty(invoices[data.invoiceIndex])) {
listOfProducts.push({
...invoices[data.invoiceIndex].items[data.itemIndex],
invoiceNo: invoices[data.invoiceIndex].invoice_no,
orderDate,
shipmentStatus: invoices[data.invoiceIndex].shipment_status,
qty
})
let invoiceItem = {
invoiceNo: invoices[data.invoiceIndex].invoice_no,
deliveryMethod: invoices[data.invoiceIndex].delivery_method,
shipmentStatus: invoices[data.invoiceIndex].shipment_status,
shippingAmount: shippingCost.shippingAmount,
handlingFee: shippingCost.handlingFee
}
if (isEmpty(find(listOfInvoices, invoiceItem))) {
listOfInvoices.push(invoiceItem)
}
}
})
this.props.setProductForReturn(listOfProducts, listOfInvoices, addressData)
this.props.navigation.navigate('ReturnRefundDetailPage')
}
setAddressData = (param, data) => this.setState({ [param]: data })
setShippingLocation = (selectedProvince, selectedCity, selectedKecamatan, province, city, kecamatan) => {
const { province_name: provinceName } = find(province, ['province_id', selectedProvince]) || {}
const { city_name: cityName } = find(city, ['city_id', selectedCity]) || {}
const { kecamatan_name: kecamatanName } = find(kecamatan, ['kecamatan_id', selectedKecamatan]) || {}
this.setState({
selectedProvince,
selectedCity,
selectedKecamatan,
cityName,
provinceName,
kecamatanName
})
this.submitData()
}
renderPicker = (address) => {
let extraAddressData = {
address_id: 0,
address_name: 'Tambah alamat penjemputan',
first_name: '',
province: {},
city: {},
kecamatan: {}
}
return (
<FlatList
data={address}
ListFooterComponent={() => (
<TouchableOpacity onPress={() => this.setAddress(extraAddressData)} style={{ marginHorizontal: 12, paddingVertical: 10, justifyContent: 'space-between', flexDirection: 'row', borderBottomColor: '#E0E6ED', borderBottomWidth: 1 }}>
<Text style={{ color: '#757885', fontFamily: 'Quicksand-Regular', fontSize: 16 }}>Tambah alamat penjemputan</Text>
</TouchableOpacity>
)}
renderItem={({ item }) => (
<TouchableOpacity onPress={() => this.setAddress(item)} style={{ marginHorizontal: 12, paddingVertical: 10, justifyContent: 'space-between', flexDirection: 'row', borderBottomColor: '#E0E6ED', borderBottomWidth: 1 }}>
<Text style={{ color: '#757885', fontFamily: 'Quicksand-Regular', fontSize: 16 }}>{item.address_name}</Text>
</TouchableOpacity>
)}
keyExtractor={(item, index) => `address item ${index}`}
/>
)
}
setParentState = (state) => {
    this.setState(state)
}
render () {
const { showData, totalItemSelected, address } = this.props
const { name, selectedAddress, addressName, phone, postalCode, fullAddress, selectedProvince, selectedCity, selectedKecamatan } = this.state
let obj = {
province: {
province_id: selectedProvince
},
city: {
city_id: selectedCity
},
kecamatan: {
kecamatan_id: selectedKecamatan
}
}
let success = true
const skippedStates = ['selectedProvince', 'selectedCity', 'selectedKecamatan', 'provinceName', 'cityName', 'kecamatanName', 'kecamatanCode', 'addressId']
for (let key of Object.keys(this.state)) {
if (isEmpty(this.state[key]) && !skippedStates.includes(key)) {
success = false
}
}
let ButtonComponent = (showData && success) ? ButtonFilledPrimary : ButtonFilledDisabled
let ButtonTextComponent = (showData && success) ? ButtonFilledText : ButtonFilledTextDisabled
return (
<Container>
{(showData) &&
<>
<FontSizeM>Alamat Penjemputan</FontSizeM>
<FormS>
<TouchableOpacity style={{ padding: 10, justifyContent: 'space-between', alignItems: 'center', flexDirection: 'row' }} onPress={() => this.refs.child.setModal(true)}>
<FontSizeM>{selectedAddress}</FontSizeM>
<Icon name='menu-down' size={24} />
</TouchableOpacity>
</FormS>
{selectedAddress.toLowerCase() === 'tambah alamat penjemputan'
? <>
<FontSizeM>Nama Alamat</FontSizeM>
<FormS>
<Input style={{ color: '#555761' }} value={addressName} onChangeText={(e) => this.setAddressData('addressName', e)} />
</FormS>
</>
: null
}
<FontSizeM>Nama Lengkap</FontSizeM>
<FormS>
<Input style={{ color: '#555761' }} value={name} onChangeText={(e) => this.setAddressData('name', e)} />
</FormS>
<FontSizeM>No telepon</FontSizeM>
<FormS>
<Input style={{ color: '#555761' }} keyboardType='numeric' value={phone} onChangeText={e => this.setAddressData('phone', e)} />
</FormS>
<FontSizeM>Kode Pos</FontSizeM>
<FormS>
<Input style={{ color: '#555761' }} keyboardType='numeric' maxLength={5} value={postalCode} onChangeText={e => this.setAddressData('postalCode', e)} />
</FormS>
<PickerLocation | {
shippingCost.shippingAmount += invoices[data.invoiceIndex].items[data.itemIndex].shipping_amount * qtyData.qty
shippingCost.handlingFee += invoices[data.invoiceIndex].items[data.itemIndex].handling_fee_adjust * qtyData.qty
} | conditional_block |
decode_test.go | 1544864, 333731851, 333731852, 333731850, 333731855,
333731858, 333731854, 108047, 769984352, 21544864},
Tags: map[string]string{
"area": "yes",
"highway": "pedestrian",
"name": "Fitzroy Square",
},
Info: Info{
Version: 7,
Timestamp: parseTime("2013-08-07T12:08:39Z"),
Changeset: 17253164,
Uid: 1016290,
User: "Amaroussi",
Visible: true,
},
}
er = &Relation{
ID: 7677,
Members: []Member{
{ID: 4875932, Type: WayType, Role: "outer"},
{ID: 4894305, Type: WayType, Role: "inner"},
},
Tags: map[string]string{
"created_by": "Potlatch 0.9c",
"type": "multipolygon",
},
Info: Info{
Version: 4,
Timestamp: parseTime("2008-07-19T15:04:03Z"),
Changeset: 540201,
Uid: 3876,
User: "Edgemaster",
Visible: true,
},
}
)
func init() {
IDs = make(map[string]bool)
for _, id := range IDsExpectedOrder {
IDs[id] = false
}
}
func downloadTestOSMFile(fileName string, t *testing.T) {
_, err := os.Stat(fileName)
if err == nil {
return
}
if !os.IsNotExist(err) {
t.Fatal(err)
}
resp, err := http.Get(GistURL + fileName)
if err != nil {
t.Fatal(err)
}
defer resp.Body.Close()
if resp.StatusCode != 200 {
t.Fatalf("expected 200, got %d", resp.StatusCode)
}
out, err := os.Create(fileName)
if err != nil {
t.Fatal(err)
}
defer out.Close()
if _, err = io.Copy(out, resp.Body); err != nil {
t.Fatal(err)
}
}
func checkHeader(a *Header) bool {
if a == nil || a.BoundingBox == nil || a.RequiredFeatures == nil {
return false
}
// check bbox
if a.BoundingBox.Right != eh.BoundingBox.Right || a.BoundingBox.Left != eh.BoundingBox.Left || a.BoundingBox.Top != eh.BoundingBox.Top || a.BoundingBox.Bottom != eh.BoundingBox.Bottom {
return false
}
// check timestamp
if !a.OsmosisReplicationTimestamp.Equal(eh.OsmosisReplicationTimestamp) {
return false
}
// check writing program
if a.WritingProgram != eh.WritingProgram {
return false
}
// check features
if len(a.RequiredFeatures) != len(eh.RequiredFeatures) || a.RequiredFeatures[0] != eh.RequiredFeatures[0] || a.RequiredFeatures[1] != eh.RequiredFeatures[1] {
return false
}
return true
}
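// Editor's note (sketch, not part of the original test): checkHeader returns
// true only when every field matches eh, so failures are the false case:
//
//	if !checkHeader(header) {
//		t.Errorf("\nExpected: %#v\nActual: %#v", eh, header)
//	}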
func decodePBF(PBFfileName string, t *testing.T) {
downloadTestOSMFile(PBFfileName, t)
f, err := os.Open(PBFfileName)
if err != nil {
t.Fatal(err)
}
defer f.Close()
d := NewDecoder(f)
d.SetBufferSize(1)
header, err := d.Header()
if err != nil {
t.Fatal(err)
}
	if !checkHeader(header) {
t.Errorf("\nExpected: %#v\nActual: %#v", eh, header)
}
err = d.Start(runtime.GOMAXPROCS(-1))
if err != nil {
t.Fatal(err)
}
var n *Node
var w *Way
var r *Relation
var nc, wc, rc uint64
var id string
idsOrder := make([]string, 0, len(IDsExpectedOrder))
for {
if v, err := d.Decode(); err == io.EOF {
break
} else if err != nil {
t.Fatal(err)
} else {
switch v := v.(type) {
case *Node:
nc++
if v.ID == en.ID {
n = v
}
id = fmt.Sprintf("node/%d", v.ID)
if _, ok := IDs[id]; ok {
idsOrder = append(idsOrder, id)
}
case *Way:
wc++
if v.ID == ew.ID {
w = v
}
id = fmt.Sprintf("way/%d", v.ID)
if _, ok := IDs[id]; ok {
idsOrder = append(idsOrder, id)
}
case *Relation:
rc++
if v.ID == er.ID {
r = v
}
id = fmt.Sprintf("relation/%d", v.ID)
if _, ok := IDs[id]; ok {
idsOrder = append(idsOrder, id)
}
default:
t.Fatalf("unknown type %T", v)
}
}
}
if !reflect.DeepEqual(en, n) {
t.Errorf("\nExpected: %#v\nActual: %#v", en, n)
}
if !reflect.DeepEqual(ew, w) {
t.Errorf("\nExpected: %#v\nActual: %#v", ew, w)
}
if !reflect.DeepEqual(er, r) {
t.Errorf("\nExpected: %#v\nActual: %#v", er, r)
}
if enc != nc || ewc != wc || erc != rc {
t.Errorf("\nExpected %7d nodes, %7d ways, %7d relations\nGot %7d nodes, %7d ways, %7d relations.",
enc, ewc, erc, nc, wc, rc)
}
if !reflect.DeepEqual(IDsExpectedOrder, idsOrder) {
t.Errorf("\nExpected: %v\nGot: %v", IDsExpectedOrder, idsOrder)
}
}
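// Editor's sketch (mirrors the calls above; not part of the original file):
// a minimal consumer configures the decoder, starts it, then drains Decode
// until io.EOF.
//
//	d := NewDecoder(f)
//	d.SetBufferSize(1)
//	if err := d.Start(runtime.GOMAXPROCS(-1)); err != nil {
//		t.Fatal(err)
//	}
//	for {
//		v, err := d.Decode()
//		if err == io.EOF {
//			break
//		} else if err != nil {
//			t.Fatal(err)
//		}
//		_ = v // dispatch on *Node, *Way, *Relation
//	}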
func TestDecodePBFWithDenseNodes(t *testing.T) {
decodePBF(London, t)
}
func TestDecodePBFWithNodes(t *testing.T) {
decodePBF(LondonNonDense, t)
}
func TestDecodeConcurrent(t *testing.T) {
downloadTestOSMFile(London, t)
f, err := os.Open(London)
if err != nil {
t.Fatal(err)
}
defer f.Close()
d := NewDecoder(f)
d.SetBufferSize(1)
err = d.Start(runtime.GOMAXPROCS(-1))
if err != nil {
t.Fatal(err)
}
header, err := d.Header()
if err != nil {
t.Fatal(err)
}
	if !checkHeader(header) {
t.Errorf("\nExpected: %#v\nActual: %#v", eh, header)
}
var n *Node
var w *Way
var r *Relation
var nc, wc, rc uint64
var wg sync.WaitGroup
for i := 0; i < 4; i++ {
wg.Add(1)
go func() {
defer wg.Done()
for {
if v, err := d.Decode(); err == io.EOF {
return
} else if err != nil {
t.Error(err)
return
} else {
switch v := v.(type) {
case *Node:
atomic.AddUint64(&nc, 1)
if v.ID == en.ID {
n = v
}
case *Way:
atomic.AddUint64(&wc, 1)
if v.ID == ew.ID {
w = v
}
case *Relation:
atomic.AddUint64(&rc, 1)
if v.ID == er.ID {
r = v
}
default:
t.Errorf("unknown type %T", v)
return
}
}
}
}()
}
wg.Wait()
if !reflect.DeepEqual(en, n) {
t.Errorf("\nExpected: %#v\nActual: %#v", en, n)
}
if !reflect.DeepEqual(ew, w) {
t.Errorf("\nExpected: %#v\nActual: %#v", ew, w)
}
if !reflect.DeepEqual(er, r) {
t.Errorf("\nExpected: %#v\nActual: %#v", er, r)
}
if enc != nc || ewc != wc || erc != rc {
t.Errorf("\nExpected %7d nodes, %7d ways, %7d relations\nGot %7d nodes, %7d ways, %7d relations",
enc, ewc, erc, nc, wc, rc)
}
}
func | BenchmarkDecode | identifier_name |
|
decode_test.go |
var (
IDsExpectedOrder = []string{
// Start of dense nodes.
"node/44", "node/47", "node/52", "node/58", "node/60",
"node/79", // Just because way/79 is already there
"node/2740703694", "node/2740703695", "node/2740703697",
"node/2740703699", "node/2740703701",
// End of dense nodes.
// Start of ways.
"way/73", "way/74", "way/75", "way/79", "way/482",
"way/268745428", "way/268745431", "way/268745434", "way/268745436",
"way/268745439",
// End of ways.
// Start of relations.
"relation/69", "relation/94", "relation/152", "relation/245",
"relation/332", "relation/3593436", "relation/3595575",
"relation/3595798", "relation/3599126", "relation/3599127",
// End of relations
}
IDs map[string]bool
enc uint64 = 2729006
ewc uint64 = 459055
erc uint64 = 12833
eh = &Header{
BoundingBox: &BoundingBox{
Right: 0.335437,
Left: -0.511482,
Bottom: 51.28554,
Top: 51.69344,
},
OsmosisReplicationTimestamp: time.Date(2014, 3, 24, 22, 55, 2, 0, time.FixedZone("test", 3600)),
RequiredFeatures: []string{
"OsmSchema-V0.6",
"DenseNodes",
},
WritingProgram: `Osmium (http:\/\/wiki.openstreetmap.org\/wiki\/Osmium)`,
}
en = &Node{
ID: 18088578,
Lat: 51.5442632,
Lon: -0.2010027,
Tags: map[string]string{
"alt_name": "The King's Head",
"amenity": "pub",
"created_by": "JOSM",
"name": "The Luminaire",
"note": "Live music venue too",
},
Info: Info{
Version: 2,
Timestamp: parseTime("2009-05-20T10:28:54Z"),
Changeset: 1260468,
Uid: 508,
User: "Welshie",
Visible: true,
},
}
ew = &Way{
ID: 4257116,
NodeIDs: []int64{
21544864, 333731851, 333731852, 333731850, 333731855,
333731858, 333731854, 108047, 769984352, 21544864},
Tags: map[string]string{
"area": "yes",
"highway": "pedestrian",
"name": "Fitzroy Square",
},
Info: Info{
Version: 7,
Timestamp: parseTime("2013-08-07T12:08:39Z"),
Changeset: 17253164,
Uid: 1016290,
User: "Amaroussi",
Visible: true,
},
}
er = &Relation{
ID: 7677,
Members: []Member{
{ID: 4875932, Type: WayType, Role: "outer"},
{ID: 4894305, Type: WayType, Role: "inner"},
},
Tags: map[string]string{
"created_by": "Potlatch 0.9c",
"type": "multipolygon",
},
Info: Info{
Version: 4,
Timestamp: parseTime("2008-07-19T15:04:03Z"),
Changeset: 540201,
Uid: 3876,
User: "Edgemaster",
Visible: true,
},
}
)
func init() {
IDs = make(map[string]bool)
for _, id := range IDsExpectedOrder {
IDs[id] = false
}
}
func downloadTestOSMFile(fileName string, t *testing.T) {
_, err := os.Stat(fileName)
if err == nil {
return
}
if !os.IsNotExist(err) {
t.Fatal(err)
}
resp, err := http.Get(GistURL + fileName)
if err != nil {
t.Fatal(err)
}
defer resp.Body.Close()
if resp.StatusCode != 200 {
t.Fatalf("expected 200, got %d", resp.StatusCode)
}
out, err := os.Create(fileName)
if err != nil {
t.Fatal(err)
}
defer out.Close()
if _, err = io.Copy(out, resp.Body); err != nil {
t.Fatal(err)
}
}
func checkHeader(a *Header) bool {
if a == nil || a.BoundingBox == nil || a.RequiredFeatures == nil {
return false
}
// check bbox
if a.BoundingBox.Right != eh.BoundingBox.Right || a.BoundingBox.Left != eh.BoundingBox.Left || a.BoundingBox.Top != eh.BoundingBox.Top || a.BoundingBox.Bottom != eh.BoundingBox.Bottom {
return false
}
// check timestamp
if !a.OsmosisReplicationTimestamp.Equal(eh.OsmosisReplicationTimestamp) {
return false
}
// check writing program
if a.WritingProgram != eh.WritingProgram {
return false
}
// check features
if len(a.RequiredFeatures) != len(eh.RequiredFeatures) || a.RequiredFeatures[0] != eh.RequiredFeatures[0] || a.RequiredFeatures[1] != eh.RequiredFeatures[1] {
return false
}
return true
}
func decodePBF(PBFfileName string, t *testing.T) {
downloadTestOSMFile(PBFfileName, t)
f, err := os.Open(PBFfileName)
if err != nil {
t.Fatal(err)
}
defer f.Close()
d := NewDecoder(f)
d.SetBufferSize(1)
header, err := d.Header()
if err != nil {
t.Fatal(err)
}
	if !checkHeader(header) {
t.Errorf("\nExpected: %#v\nActual: %#v", eh, header)
}
err = d.Start(runtime.GOMAXPROCS(-1))
if err != nil {
t.Fatal(err)
}
var n *Node
var w *Way
var r *Relation
var nc, wc, rc uint64
var id string
idsOrder := make([]string, 0, len(IDsExpectedOrder))
for {
if v, err := d.Decode(); err == io.EOF {
break
} else if err != nil {
t.Fatal(err)
} else {
switch v := v.(type) {
case *Node:
nc++
if v.ID == en.ID {
n = v
}
id = fmt.Sprintf("node/%d", v.ID)
if _, ok := IDs[id]; ok {
idsOrder = append(idsOrder, id)
}
case *Way:
wc++
if v.ID == ew.ID {
w = v
}
id = fmt.Sprintf("way/%d", v.ID)
if _, ok := IDs[id]; ok {
idsOrder = append(idsOrder, id)
}
case *Relation:
rc++
if v | {
t, err := time.Parse(time.RFC3339, s)
if err != nil {
panic(err)
}
return t
} | identifier_body |
|
decode_test.go | "name": "Fitzroy Square",
},
Info: Info{
Version: 7,
Timestamp: parseTime("2013-08-07T12:08:39Z"),
Changeset: 17253164,
Uid: 1016290,
User: "Amaroussi",
Visible: true,
},
}
er = &Relation{
ID: 7677,
Members: []Member{
{ID: 4875932, Type: WayType, Role: "outer"},
{ID: 4894305, Type: WayType, Role: "inner"},
},
Tags: map[string]string{
"created_by": "Potlatch 0.9c",
"type": "multipolygon",
},
Info: Info{
Version: 4,
Timestamp: parseTime("2008-07-19T15:04:03Z"),
Changeset: 540201,
Uid: 3876,
User: "Edgemaster",
Visible: true,
},
}
)
func init() {
IDs = make(map[string]bool)
for _, id := range IDsExpectedOrder {
IDs[id] = false
}
}
func downloadTestOSMFile(fileName string, t *testing.T) {
_, err := os.Stat(fileName)
if err == nil {
return
}
if !os.IsNotExist(err) {
t.Fatal(err)
}
resp, err := http.Get(GistURL + fileName)
if err != nil {
t.Fatal(err)
}
defer resp.Body.Close()
if resp.StatusCode != 200 {
t.Fatalf("expected 200, got %d", resp.StatusCode)
}
out, err := os.Create(fileName)
if err != nil {
t.Fatal(err)
}
defer out.Close()
if _, err = io.Copy(out, resp.Body); err != nil {
t.Fatal(err)
}
}
func checkHeader(a *Header) bool {
if a == nil || a.BoundingBox == nil || a.RequiredFeatures == nil {
return false
}
// check bbox
if a.BoundingBox.Right != eh.BoundingBox.Right || a.BoundingBox.Left != eh.BoundingBox.Left || a.BoundingBox.Top != eh.BoundingBox.Top || a.BoundingBox.Bottom != eh.BoundingBox.Bottom {
return false
}
// check timestamp
if !a.OsmosisReplicationTimestamp.Equal(eh.OsmosisReplicationTimestamp) {
return false
}
// check writing program
if a.WritingProgram != eh.WritingProgram {
return false
}
// check features
if len(a.RequiredFeatures) != len(eh.RequiredFeatures) || a.RequiredFeatures[0] != eh.RequiredFeatures[0] || a.RequiredFeatures[1] != eh.RequiredFeatures[1] {
return false
}
return true
}
func decodePBF(PBFfileName string, t *testing.T) {
downloadTestOSMFile(PBFfileName, t)
f, err := os.Open(PBFfileName)
if err != nil {
t.Fatal(err)
}
defer f.Close()
d := NewDecoder(f)
d.SetBufferSize(1)
header, err := d.Header()
if err != nil {
t.Fatal(err)
}
	if !checkHeader(header) {
t.Errorf("\nExpected: %#v\nActual: %#v", eh, header)
}
err = d.Start(runtime.GOMAXPROCS(-1))
if err != nil {
t.Fatal(err)
}
var n *Node
var w *Way
var r *Relation
var nc, wc, rc uint64
var id string
idsOrder := make([]string, 0, len(IDsExpectedOrder))
for {
if v, err := d.Decode(); err == io.EOF {
break
} else if err != nil {
t.Fatal(err)
} else {
switch v := v.(type) {
case *Node:
nc++
if v.ID == en.ID {
n = v
}
id = fmt.Sprintf("node/%d", v.ID)
if _, ok := IDs[id]; ok {
idsOrder = append(idsOrder, id)
}
case *Way:
wc++
if v.ID == ew.ID {
w = v
}
id = fmt.Sprintf("way/%d", v.ID)
if _, ok := IDs[id]; ok {
idsOrder = append(idsOrder, id)
}
case *Relation:
rc++
if v.ID == er.ID {
r = v
}
id = fmt.Sprintf("relation/%d", v.ID)
if _, ok := IDs[id]; ok {
idsOrder = append(idsOrder, id)
}
default:
t.Fatalf("unknown type %T", v)
}
}
}
if !reflect.DeepEqual(en, n) {
t.Errorf("\nExpected: %#v\nActual: %#v", en, n)
}
if !reflect.DeepEqual(ew, w) {
t.Errorf("\nExpected: %#v\nActual: %#v", ew, w)
}
if !reflect.DeepEqual(er, r) {
t.Errorf("\nExpected: %#v\nActual: %#v", er, r)
}
if enc != nc || ewc != wc || erc != rc {
t.Errorf("\nExpected %7d nodes, %7d ways, %7d relations\nGot %7d nodes, %7d ways, %7d relations.",
enc, ewc, erc, nc, wc, rc)
}
if !reflect.DeepEqual(IDsExpectedOrder, idsOrder) {
t.Errorf("\nExpected: %v\nGot: %v", IDsExpectedOrder, idsOrder)
}
}
func TestDecodePBFWithDenseNodes(t *testing.T) {
decodePBF(London, t)
}
func TestDecodePBFWithNodes(t *testing.T) {
decodePBF(LondonNonDense, t)
}
func TestDecodeConcurrent(t *testing.T) {
downloadTestOSMFile(London, t)
f, err := os.Open(London)
if err != nil {
t.Fatal(err)
}
defer f.Close()
d := NewDecoder(f)
d.SetBufferSize(1)
err = d.Start(runtime.GOMAXPROCS(-1))
if err != nil {
t.Fatal(err)
}
header, err := d.Header()
if err != nil {
t.Fatal(err)
}
	if !checkHeader(header) {
t.Errorf("\nExpected: %#v\nActual: %#v", eh, header)
}
var n *Node
var w *Way
var r *Relation
var nc, wc, rc uint64
var wg sync.WaitGroup
for i := 0; i < 4; i++ {
wg.Add(1)
go func() {
defer wg.Done()
for {
if v, err := d.Decode(); err == io.EOF {
return
} else if err != nil {
t.Error(err)
return
} else {
switch v := v.(type) {
case *Node:
atomic.AddUint64(&nc, 1)
if v.ID == en.ID {
n = v
}
case *Way:
atomic.AddUint64(&wc, 1)
if v.ID == ew.ID {
w = v
}
case *Relation:
atomic.AddUint64(&rc, 1)
if v.ID == er.ID {
r = v
}
default:
t.Errorf("unknown type %T", v)
return
}
}
}
}()
}
wg.Wait()
if !reflect.DeepEqual(en, n) {
t.Errorf("\nExpected: %#v\nActual: %#v", en, n)
}
if !reflect.DeepEqual(ew, w) {
t.Errorf("\nExpected: %#v\nActual: %#v", ew, w)
}
if !reflect.DeepEqual(er, r) {
t.Errorf("\nExpected: %#v\nActual: %#v", er, r)
}
if enc != nc || ewc != wc || erc != rc {
t.Errorf("\nExpected %7d nodes, %7d ways, %7d relations\nGot %7d nodes, %7d ways, %7d relations",
enc, ewc, erc, nc, wc, rc)
}
}
func BenchmarkDecode(b *testing.B) {
file := os.Getenv("OSMPBF_BENCHMARK_FILE")
if file == "" {
file = London
}
f, err := os.Open(file)
if err != nil {
b.Fatal(err)
}
defer f.Close()
fileInfo, err := f.Stat()
if err != nil {
b.Fatal(err)
}
blobBufferSize, _ := strconv.Atoi(os.Getenv("OSMPBF_BENCHMARK_BUFFER")) |
b.ResetTimer()
for i := 0; i < b.N; i++ {
if _, err = f.Seek(0, 0); err != nil {
b.Fatal(err) | random_line_split |
|
decode_test.go | 45428", "way/268745431", "way/268745434", "way/268745436",
"way/268745439",
// End of ways.
// Start of relations.
"relation/69", "relation/94", "relation/152", "relation/245",
"relation/332", "relation/3593436", "relation/3595575",
"relation/3595798", "relation/3599126", "relation/3599127",
// End of relations
}
IDs map[string]bool
enc uint64 = 2729006
ewc uint64 = 459055
erc uint64 = 12833
eh = &Header{
BoundingBox: &BoundingBox{
Right: 0.335437,
Left: -0.511482,
Bottom: 51.28554,
Top: 51.69344,
},
OsmosisReplicationTimestamp: time.Date(2014, 3, 24, 22, 55, 2, 0, time.FixedZone("test", 3600)),
RequiredFeatures: []string{
"OsmSchema-V0.6",
"DenseNodes",
},
WritingProgram: `Osmium (http:\/\/wiki.openstreetmap.org\/wiki\/Osmium)`,
}
en = &Node{
ID: 18088578,
Lat: 51.5442632,
Lon: -0.2010027,
Tags: map[string]string{
"alt_name": "The King's Head",
"amenity": "pub",
"created_by": "JOSM",
"name": "The Luminaire",
"note": "Live music venue too",
},
Info: Info{
Version: 2,
Timestamp: parseTime("2009-05-20T10:28:54Z"),
Changeset: 1260468,
Uid: 508,
User: "Welshie",
Visible: true,
},
}
ew = &Way{
ID: 4257116,
NodeIDs: []int64{
21544864, 333731851, 333731852, 333731850, 333731855,
333731858, 333731854, 108047, 769984352, 21544864},
Tags: map[string]string{
"area": "yes",
"highway": "pedestrian",
"name": "Fitzroy Square",
},
Info: Info{
Version: 7,
Timestamp: parseTime("2013-08-07T12:08:39Z"),
Changeset: 17253164,
Uid: 1016290,
User: "Amaroussi",
Visible: true,
},
}
er = &Relation{
ID: 7677,
Members: []Member{
{ID: 4875932, Type: WayType, Role: "outer"},
{ID: 4894305, Type: WayType, Role: "inner"},
},
Tags: map[string]string{
"created_by": "Potlatch 0.9c",
"type": "multipolygon",
},
Info: Info{
Version: 4,
Timestamp: parseTime("2008-07-19T15:04:03Z"),
Changeset: 540201,
Uid: 3876,
User: "Edgemaster",
Visible: true,
},
}
)
func init() {
IDs = make(map[string]bool)
for _, id := range IDsExpectedOrder {
IDs[id] = false
}
}
func downloadTestOSMFile(fileName string, t *testing.T) {
_, err := os.Stat(fileName)
if err == nil {
return
}
if !os.IsNotExist(err) {
t.Fatal(err)
}
resp, err := http.Get(GistURL + fileName)
if err != nil {
t.Fatal(err)
}
defer resp.Body.Close()
if resp.StatusCode != 200 {
t.Fatalf("expected 200, got %d", resp.StatusCode)
}
out, err := os.Create(fileName)
if err != nil {
t.Fatal(err)
}
defer out.Close()
if _, err = io.Copy(out, resp.Body); err != nil |
}
func checkHeader(a *Header) bool {
if a == nil || a.BoundingBox == nil || a.RequiredFeatures == nil {
return false
}
// check bbox
if a.BoundingBox.Right != eh.BoundingBox.Right || a.BoundingBox.Left != eh.BoundingBox.Left || a.BoundingBox.Top != eh.BoundingBox.Top || a.BoundingBox.Bottom != eh.BoundingBox.Bottom {
return false
}
// check timestamp
if !a.OsmosisReplicationTimestamp.Equal(eh.OsmosisReplicationTimestamp) {
return false
}
// check writing program
if a.WritingProgram != eh.WritingProgram {
return false
}
// check features
if len(a.RequiredFeatures) != len(eh.RequiredFeatures) || a.RequiredFeatures[0] != eh.RequiredFeatures[0] || a.RequiredFeatures[1] != eh.RequiredFeatures[1] {
return false
}
return true
}
func decodePBF(PBFfileName string, t *testing.T) {
downloadTestOSMFile(PBFfileName, t)
f, err := os.Open(PBFfileName)
if err != nil {
t.Fatal(err)
}
defer f.Close()
d := NewDecoder(f)
d.SetBufferSize(1)
header, err := d.Header()
if err != nil {
t.Fatal(err)
}
	if !checkHeader(header) {
t.Errorf("\nExpected: %#v\nActual: %#v", eh, header)
}
err = d.Start(runtime.GOMAXPROCS(-1))
if err != nil {
t.Fatal(err)
}
var n *Node
var w *Way
var r *Relation
var nc, wc, rc uint64
var id string
idsOrder := make([]string, 0, len(IDsExpectedOrder))
for {
if v, err := d.Decode(); err == io.EOF {
break
} else if err != nil {
t.Fatal(err)
} else {
switch v := v.(type) {
case *Node:
nc++
if v.ID == en.ID {
n = v
}
id = fmt.Sprintf("node/%d", v.ID)
if _, ok := IDs[id]; ok {
idsOrder = append(idsOrder, id)
}
case *Way:
wc++
if v.ID == ew.ID {
w = v
}
id = fmt.Sprintf("way/%d", v.ID)
if _, ok := IDs[id]; ok {
idsOrder = append(idsOrder, id)
}
case *Relation:
rc++
if v.ID == er.ID {
r = v
}
id = fmt.Sprintf("relation/%d", v.ID)
if _, ok := IDs[id]; ok {
idsOrder = append(idsOrder, id)
}
default:
t.Fatalf("unknown type %T", v)
}
}
}
if !reflect.DeepEqual(en, n) {
t.Errorf("\nExpected: %#v\nActual: %#v", en, n)
}
if !reflect.DeepEqual(ew, w) {
t.Errorf("\nExpected: %#v\nActual: %#v", ew, w)
}
if !reflect.DeepEqual(er, r) {
t.Errorf("\nExpected: %#v\nActual: %#v", er, r)
}
if enc != nc || ewc != wc || erc != rc {
t.Errorf("\nExpected %7d nodes, %7d ways, %7d relations\nGot %7d nodes, %7d ways, %7d relations.",
enc, ewc, erc, nc, wc | {
t.Fatal(err)
} | conditional_block |
satAverage.py | nodd=0
for i in range(0, len(lines)):
if i%2:
nodd+=1
oddf.write(lines[i])
else:
neven+=1
evenf.write(lines[i])
evenf.close()
oddf.close()
if neven>0:
self.makeClassAverages(evenfile, self.params['evenstack'], classdata, maskrad)
if nodd>0:
self.makeClassAverages(oddfile, self.params['oddstack'], classdata, maskrad)
apFile.removeFile(evenfile)
apFile.removeFile(oddfile)
#=====================
def getParticleInfo(self, reconid, iteration):
"""
Get all particle data for given recon and iteration
"""
t0 = time.time()
cachefile = os.path.join(self.params['rundir'],
"refineparticledata-r"+str(reconid)+"-i"+str(iteration)+".cache")
if os.path.isfile(cachefile):
apDisplay.printColor("loading refineparticledata from cache file", "cyan")
f = open(cachefile, 'r')
refineparticledata = cPickle.load(f)
f.close()
else:
refinerundata = appiondata.ApRefineRunData.direct_query(reconid)
if not refinerundata:
apDisplay.printError("Could not find refinerundata for reconrun id="+str(reconid))
refineq = appiondata.ApRefineIterData()
refineq['refineRun'] = refinerundata
refineq['iteration'] = iteration
refinedata = refineq.query(results=1)
if not refinedata:
apDisplay.printError("Could not find refinedata for reconrun id="
+str(reconid)+" iter="+str(iteration))
refinepartq=appiondata.ApRefineParticleData()
refinepartq['refineIter']=refinedata[0]
apDisplay.printMsg("querying particles on "+time.asctime())
refineparticledata = refinepartq.query()
apDisplay.printMsg("saving refineparticledata to cache file")
f = open(cachefile, 'w')
cPickle.dump(refineparticledata, f)
f.close()
apDisplay.printMsg("received "+str(len(refineparticledata))+" particles in "+apDisplay.timeString(time.time()-t0))
return refineparticledata
#=====================
def procKeepList(self):
"""
		Builds the list of particle numbers to keep, read from a list generated externally; particles not listed are dropped.
Requirements:
the input file has one particle per line
the first piece of data is the particle number from the db
"""
keeplist = []
f = open(self.params['keeplist'], 'r')
lines = f.readlines()
f.close()
		for n in lines:
			words = n.split()
			if not words:
				continue  # skip blank lines
			keeplist.append(int(words[0])+1)
return keeplist
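	# Example keep-list layout (editor's illustration; values hypothetical):
	# one particle per line, first token is the EMAN-style 0-based particle
	# number, extra columns are ignored.
	#   0   0.75
	#   3   0.91
	#   7   0.88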
#=====================
def makeClassAverages(self, classlist, outputstack, classdata, maskrad):
#align images in class
#print classlist
images = EMAN.readImages(classlist, -1, -1, 0)
for image in images:
image.rotateAndTranslate()
if image.isFlipped():
image.hFlip()
#make class average
avg = EMAN.EMData()
avg.makeMedian(images)
#write class average
e = EMAN.Euler()
alt = classdata['euler1']*math.pi/180
az = classdata['euler2']*math.pi/180
phi = 0.0 #classdata['euler3']*math.pi/180
e.setAngle(alt, az, phi)
avg.setRAlign(e)
avg.setNImg(len(images))
avg.applyMask(maskrad, 0)
avg.writeImage(outputstack,-1)
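	# Editor's note (illustrative): the Euler angles are converted from degrees
	# to radians above, as setAngle() expects. A minimal call might be
	#   self.makeClassAverages("cls0001.lst", "goodavgs.hed", classdata, 64)
	# where "cls0001.lst" and the mask radius 64 are hypothetical.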
#=====================
def determineClasses(self, particles):
"""
Takes refineparticledata and returns a dictionary of classes
"""
apDisplay.printMsg("sorting refineparticledata into classes")
t0 = time.time()
classes={}
class_stats={}
quality=numpy.zeros(len(particles))
for partnum in range(len(particles)):
quality[partnum] = particles[partnum]['quality_factor']
key = ("%.3f_%.3f"%(particles[partnum]['euler1'], particles[partnum]['euler2']))
if key not in classes.keys():
classes[key]={}
classes[key]['particles']=[]
classes[key]['euler1'] = particles[partnum]['euler1']
classes[key]['euler2'] = particles[partnum]['euler2']
#classes have no inplane rotation
classes[key]['euler3'] = 0.0 #particles[partnum]['euler3']
classes[key]['particles'].append(particles[partnum])
class_stats['meanquality']=quality.mean()
class_stats['stdquality']=quality.std()
class_stats['max']=quality.max()
class_stats['min']=quality.min()
apDisplay.printMsg("sorted %d particles into %d classes"%(len(particles), len(classes)))
### print stats
print "-- quality factor stats --"
print ("mean/std :: "+str(round(class_stats['meanquality'],2))+" +/- "
+str(round(class_stats['stdquality'],2)))
print ("min/max :: "+str(round(class_stats['min'],2))+" <> "
+str(round(class_stats['max'],2)))
apDisplay.printMsg("finished sorting in "+apDisplay.timeString(time.time()-t0))
return classes, class_stats
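	# Editor's sketch (illustrative): classes are keyed by the formatted Euler
	# pair, so a particle with euler1=12.345 and euler2=67.890 (hypothetical
	# values) lands in classes["12.345_67.890"]; euler3 is forced to 0.0 so a
	# class carries no in-plane rotation.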
#=====================
def getClassData(self, reconid, iternum):
t0 = time.time()
cachefile = os.path.join(self.params['rundir'],
"partclassdata-r"+str(reconid)+"-i"+str(iternum)+".cache")
if os.path.isfile(cachefile):
apDisplay.printColor("loading particle class data from cache file", "cyan")
f = open(cachefile, 'r')
classes = cPickle.load(f)
f.close()
else:
apDisplay.printMsg("determine particle class data from database")
particles = self.getParticleInfo(reconid, iternum)
classes, cstats = self.determineClasses(particles)
f = open(cachefile, 'w')
apDisplay.printMsg("saving particle class data to cache file")
cPickle.dump(classes, f)
f.close()
apDisplay.printMsg("received "+str(len(classes))+" classes in "+apDisplay.timeString(time.time()-t0))
return classes
#######################################################
#### ITEMS BELOW CAN BE SPECIFIED IN A NEW PROGRAM ####
#######################################################
#=====================
def setupParserOptions(self):
self.parser.set_usage("Usage: %prog --reconid=<DEF_id> --iter=<iter> --mask=<radius>\n\t "
+"[ --stackname=<name> "
+" --avgjump=<avg> --sigma=<sigma> --eotest ]")
self.parser.add_option("-r", "--reconid", dest="reconid", type="int",
help="Reconstruction run id", metavar="INT")
self.parser.add_option("-m", "--mask", dest="mask", type="int",
help="Mask radius in pixels", metavar="INT")
self.parser.add_option("-i", "--iter", dest="iter", type="int",
help="Final eulers applied to particles will come from this iteration", metavar="INT")
self.parser.add_option("--stackname", dest="stackname", default="goodavgs.hed",
help="Name of the stack to write the averages", metavar="file.hed")
self.parser.add_option("--keep-list", dest="keeplist",
help="Keep particles in the specified text file, EMAN style 0,1,...", metavar="TEXT")
self.parser.add_option("--eotest", dest="eotest", default=False,
action="store_true", help="Perform even/odd test")
#=====================
def checkConflicts(self):
if self.params['reconid'] is None:
apDisplay.printError("enter a reconstruction ID from the database")
if self.params['mask'] is None:
apDisplay.printError("enter a mask radius")
if self.params['iter'] is None:
apDisplay.printError("enter an iteration for the final Eulers")
if self.params['keeplist'] is None:
apDisplay.printError("enter an keep list file")
self.params['keeplist'] = os.path.abspath(self.params['keeplist'])
if not os.path.isfile(self.params['keeplist']):
apDisplay.printError("could not find list file")
self.params['stackid'] = apStack.getStackIdFromRecon(self.params['reconid'])
if self.params['stackname'][-4:] != ".hed":
s = os.path.splitext(self.params['stackname'])[0]
s += ".hed"
self.params['stackname'] | random_line_split |
||
satAverage.py | ineq = appiondata.ApRefineIterData()
refineq['refineRun'] = refinerundata
refineq['iteration'] = iteration
refinedata = refineq.query(results=1)
if not refinedata:
apDisplay.printError("Could not find refinedata for reconrun id="
+str(reconid)+" iter="+str(iteration))
refinepartq=appiondata.ApRefineParticleData()
refinepartq['refineIter']=refinedata[0]
apDisplay.printMsg("querying particles on "+time.asctime())
refineparticledata = refinepartq.query()
apDisplay.printMsg("saving refineparticledata to cache file")
f = open(cachefile, 'w')
cPickle.dump(refineparticledata, f)
f.close()
apDisplay.printMsg("received "+str(len(refineparticledata))+" particles in "+apDisplay.timeString(time.time()-t0))
return refineparticledata
#=====================
def procKeepList(self):
"""
		Builds the list of particle numbers to keep, read from a list generated externally; particles not listed are dropped.
Requirements:
the input file has one particle per line
the first piece of data is the particle number from the db
"""
keeplist = []
f = open(self.params['keeplist'], 'r')
lines = f.readlines()
f.close()
		for n in lines:
			words = n.split()
			if not words:
				continue  # skip blank lines
			keeplist.append(int(words[0])+1)
return keeplist
#=====================
def makeClassAverages(self, classlist, outputstack, classdata, maskrad):
#align images in class
#print classlist
images = EMAN.readImages(classlist, -1, -1, 0)
for image in images:
image.rotateAndTranslate()
if image.isFlipped():
image.hFlip()
#make class average
avg = EMAN.EMData()
avg.makeMedian(images)
#write class average
e = EMAN.Euler()
alt = classdata['euler1']*math.pi/180
az = classdata['euler2']*math.pi/180
phi = 0.0 #classdata['euler3']*math.pi/180
e.setAngle(alt, az, phi)
avg.setRAlign(e)
avg.setNImg(len(images))
avg.applyMask(maskrad, 0)
avg.writeImage(outputstack,-1)
#=====================
def determineClasses(self, particles):
"""
Takes refineparticledata and returns a dictionary of classes
"""
apDisplay.printMsg("sorting refineparticledata into classes")
t0 = time.time()
classes={}
class_stats={}
quality=numpy.zeros(len(particles))
for partnum in range(len(particles)):
quality[partnum] = particles[partnum]['quality_factor']
key = ("%.3f_%.3f"%(particles[partnum]['euler1'], particles[partnum]['euler2']))
if key not in classes.keys():
classes[key]={}
classes[key]['particles']=[]
classes[key]['euler1'] = particles[partnum]['euler1']
classes[key]['euler2'] = particles[partnum]['euler2']
#classes have no inplane rotation
classes[key]['euler3'] = 0.0 #particles[partnum]['euler3']
classes[key]['particles'].append(particles[partnum])
class_stats['meanquality']=quality.mean()
class_stats['stdquality']=quality.std()
class_stats['max']=quality.max()
class_stats['min']=quality.min()
apDisplay.printMsg("sorted %d particles into %d classes"%(len(particles), len(classes)))
### print stats
print "-- quality factor stats --"
print ("mean/std :: "+str(round(class_stats['meanquality'],2))+" +/- "
+str(round(class_stats['stdquality'],2)))
print ("min/max :: "+str(round(class_stats['min'],2))+" <> "
+str(round(class_stats['max'],2)))
apDisplay.printMsg("finished sorting in "+apDisplay.timeString(time.time()-t0))
return classes, class_stats
#=====================
def getClassData(self, reconid, iternum):
t0 = time.time()
cachefile = os.path.join(self.params['rundir'],
"partclassdata-r"+str(reconid)+"-i"+str(iternum)+".cache")
if os.path.isfile(cachefile):
apDisplay.printColor("loading particle class data from cache file", "cyan")
f = open(cachefile, 'r')
classes = cPickle.load(f)
f.close()
else:
apDisplay.printMsg("determine particle class data from database")
particles = self.getParticleInfo(reconid, iternum)
classes, cstats = self.determineClasses(particles)
f = open(cachefile, 'w')
apDisplay.printMsg("saving particle class data to cache file")
cPickle.dump(classes, f)
f.close()
apDisplay.printMsg("received "+str(len(classes))+" classes in "+apDisplay.timeString(time.time()-t0))
return classes
#######################################################
#### ITEMS BELOW CAN BE SPECIFIED IN A NEW PROGRAM ####
#######################################################
#=====================
def setupParserOptions(self):
self.parser.set_usage("Usage: %prog --reconid=<DEF_id> --iter=<iter> --mask=<radius>\n\t "
+"[ --stackname=<name> "
+" --avgjump=<avg> --sigma=<sigma> --eotest ]")
self.parser.add_option("-r", "--reconid", dest="reconid", type="int",
help="Reconstruction run id", metavar="INT")
self.parser.add_option("-m", "--mask", dest="mask", type="int",
help="Mask radius in pixels", metavar="INT")
self.parser.add_option("-i", "--iter", dest="iter", type="int",
help="Final eulers applied to particles will come from this iteration", metavar="INT")
self.parser.add_option("--stackname", dest="stackname", default="goodavgs.hed",
help="Name of the stack to write the averages", metavar="file.hed")
self.parser.add_option("--keep-list", dest="keeplist",
help="Keep particles in the specified text file, EMAN style 0,1,...", metavar="TEXT")
self.parser.add_option("--eotest", dest="eotest", default=False,
action="store_true", help="Perform even/odd test")
#=====================
def checkConflicts(self):
if self.params['reconid'] is None:
apDisplay.printError("enter a reconstruction ID from the database")
if self.params['mask'] is None:
apDisplay.printError("enter a mask radius")
if self.params['iter'] is None:
apDisplay.printError("enter an iteration for the final Eulers")
if self.params['keeplist'] is None:
apDisplay.printError("enter an keep list file")
self.params['keeplist'] = os.path.abspath(self.params['keeplist'])
if not os.path.isfile(self.params['keeplist']):
apDisplay.printError("could not find list file")
self.params['stackid'] = apStack.getStackIdFromRecon(self.params['reconid'])
if self.params['stackname'][-4:] != ".hed":
s = os.path.splitext(self.params['stackname'])[0]
s += ".hed"
self.params['stackname'] = s
apDisplay.printMsg("Stack name: "+self.params['stackname'])
self.params['symmetry'] = apSymmetry.getSymmetryFromReconRunId(self.params['reconid'])
self.params['symmname'] = self.params['symmetry']['eman_name']
#=====================
def setRunDir(self):
|
#=====================
def start(self):
self.rootname = self.params['stackname'].split(".")[0]
self.params['outputstack'] = os.path.join(self.params['rundir'], self.params['stackname'])
if os.path.isfile(self.params['outputstack']):
apFile.removeStack(self.params['outputstack'])
if self.params['eotest'] is True:
self.params['evenstack'] = os.path.splitext(self.params['outputstack'])[0]+'.even.hed'
if os.path.isfile(self.params['evenstack']):
apFile.removeStack(self.params['evenstack'])
self.params['oddstack'] = os.path.splitext(self.params['outputstack'])[0]+'.odd | refdata = appiondata.ApRefineRunData.direct_query(self.params['reconid'])
if not refdata:
apDisplay.printError("reconid "+str(self.params['reconid'])+" does not exist in the database")
refpath = refdata['path']['path']
rundir = os.path.join(refpath, "../../satEuler/sat-recon%d/volumes"%(self.params['reconid']))
self.params['rundir'] = os.path.abspath(rundir) | identifier_body |
satAverage.py | ineq = appiondata.ApRefineIterData()
refineq['refineRun'] = refinerundata
refineq['iteration'] = iteration
refinedata = refineq.query(results=1)
if not refinedata:
apDisplay.printError("Could not find refinedata for reconrun id="
+str(reconid)+" iter="+str(iteration))
refinepartq=appiondata.ApRefineParticleData()
refinepartq['refineIter']=refinedata[0]
apDisplay.printMsg("querying particles on "+time.asctime())
refineparticledata = refinepartq.query()
apDisplay.printMsg("saving refineparticledata to cache file")
f = open(cachefile, 'w')
cPickle.dump(refineparticledata, f)
f.close()
apDisplay.printMsg("received "+str(len(refineparticledata))+" particles in "+apDisplay.timeString(time.time()-t0))
return refineparticledata
#=====================
def | (self):
"""
Removes particles by reading a list of particle numbers generated externally.
Requirements:
the input file has one particle per line
the first piece of data is the particle number from the db
"""
keeplist = []
f = open(self.params['keeplist'], 'r')
lines = f.readlines()
f.close()
for n in lines:
words = n.split()
keeplist.append(int(words[0])+1)
return keeplist
#=====================
def makeClassAverages(self, classlist, outputstack, classdata, maskrad):
#align images in class
#print classlist
images = EMAN.readImages(classlist, -1, -1, 0)
for image in images:
image.rotateAndTranslate()
if image.isFlipped():
image.hFlip()
#make class average
avg = EMAN.EMData()
avg.makeMedian(images)
#write class average
e = EMAN.Euler()
alt = classdata['euler1']*math.pi/180
az = classdata['euler2']*math.pi/180
phi = 0.0 #classdata['euler3']*math.pi/180
e.setAngle(alt, az, phi)
avg.setRAlign(e)
avg.setNImg(len(images))
avg.applyMask(maskrad, 0)
avg.writeImage(outputstack,-1)
#=====================
def determineClasses(self, particles):
"""
Takes refineparticledata and returns a dictionary of classes
"""
apDisplay.printMsg("sorting refineparticledata into classes")
t0 = time.time()
classes={}
class_stats={}
quality=numpy.zeros(len(particles))
for partnum in range(len(particles)):
quality[partnum] = particles[partnum]['quality_factor']
key = ("%.3f_%.3f"%(particles[partnum]['euler1'], particles[partnum]['euler2']))
if key not in classes.keys():
classes[key]={}
classes[key]['particles']=[]
classes[key]['euler1'] = particles[partnum]['euler1']
classes[key]['euler2'] = particles[partnum]['euler2']
#classes have no inplane rotation
classes[key]['euler3'] = 0.0 #particles[partnum]['euler3']
classes[key]['particles'].append(particles[partnum])
class_stats['meanquality']=quality.mean()
class_stats['stdquality']=quality.std()
class_stats['max']=quality.max()
class_stats['min']=quality.min()
apDisplay.printMsg("sorted %d particles into %d classes"%(len(particles), len(classes)))
### print stats
print "-- quality factor stats --"
print ("mean/std :: "+str(round(class_stats['meanquality'],2))+" +/- "
+str(round(class_stats['stdquality'],2)))
print ("min/max :: "+str(round(class_stats['min'],2))+" <> "
+str(round(class_stats['max'],2)))
apDisplay.printMsg("finished sorting in "+apDisplay.timeString(time.time()-t0))
return classes, class_stats
#=====================
def getClassData(self, reconid, iternum):
t0 = time.time()
cachefile = os.path.join(self.params['rundir'],
"partclassdata-r"+str(reconid)+"-i"+str(iternum)+".cache")
if os.path.isfile(cachefile):
apDisplay.printColor("loading particle class data from cache file", "cyan")
f = open(cachefile, 'r')
classes = cPickle.load(f)
f.close()
else:
apDisplay.printMsg("determine particle class data from database")
particles = self.getParticleInfo(reconid, iternum)
classes, cstats = self.determineClasses(particles)
f = open(cachefile, 'w')
apDisplay.printMsg("saving particle class data to cache file")
cPickle.dump(classes, f)
f.close()
apDisplay.printMsg("received "+str(len(classes))+" classes in "+apDisplay.timeString(time.time()-t0))
return classes
#######################################################
#### ITEMS BELOW CAN BE SPECIFIED IN A NEW PROGRAM ####
#######################################################
#=====================
def setupParserOptions(self):
self.parser.set_usage("Usage: %prog --reconid=<DEF_id> --iter=<iter> --mask=<radius>\n\t "
+"[ --stackname=<name> "
+" --avgjump=<avg> --sigma=<sigma> --eotest ]")
self.parser.add_option("-r", "--reconid", dest="reconid", type="int",
help="Reconstruction run id", metavar="INT")
self.parser.add_option("-m", "--mask", dest="mask", type="int",
help="Mask radius in pixels", metavar="INT")
self.parser.add_option("-i", "--iter", dest="iter", type="int",
help="Final eulers applied to particles will come from this iteration", metavar="INT")
self.parser.add_option("--stackname", dest="stackname", default="goodavgs.hed",
help="Name of the stack to write the averages", metavar="file.hed")
self.parser.add_option("--keep-list", dest="keeplist",
help="Keep particles in the specified text file, EMAN style 0,1,...", metavar="TEXT")
self.parser.add_option("--eotest", dest="eotest", default=False,
action="store_true", help="Perform even/odd test")
#=====================
def checkConflicts(self):
if self.params['reconid'] is None:
apDisplay.printError("enter a reconstruction ID from the database")
if self.params['mask'] is None:
apDisplay.printError("enter a mask radius")
if self.params['iter'] is None:
apDisplay.printError("enter an iteration for the final Eulers")
if self.params['keeplist'] is None:
apDisplay.printError("enter an keep list file")
self.params['keeplist'] = os.path.abspath(self.params['keeplist'])
if not os.path.isfile(self.params['keeplist']):
apDisplay.printError("could not find list file")
self.params['stackid'] = apStack.getStackIdFromRecon(self.params['reconid'])
if self.params['stackname'][-4:] != ".hed":
s = os.path.splitext(self.params['stackname'])[0]
s += ".hed"
self.params['stackname'] = s
apDisplay.printMsg("Stack name: "+self.params['stackname'])
self.params['symmetry'] = apSymmetry.getSymmetryFromReconRunId(self.params['reconid'])
self.params['symmname'] = self.params['symmetry']['eman_name']
#=====================
def setRunDir(self):
refdata = appiondata.ApRefineRunData.direct_query(self.params['reconid'])
if not refdata:
apDisplay.printError("reconid "+str(self.params['reconid'])+" does not exist in the database")
refpath = refdata['path']['path']
rundir = os.path.join(refpath, "../../satEuler/sat-recon%d/volumes"%(self.params['reconid']))
self.params['rundir'] = os.path.abspath(rundir)
#=====================
def start(self):
self.rootname = self.params['stackname'].split(".")[0]
self.params['outputstack'] = os.path.join(self.params['rundir'], self.params['stackname'])
if os.path.isfile(self.params['outputstack']):
apFile.removeStack(self.params['outputstack'])
if self.params['eotest'] is True:
self.params['evenstack'] = os.path.splitext(self.params['outputstack'])[0]+'.even.hed'
if os.path.isfile(self.params['evenstack']):
apFile.removeStack(self.params['evenstack'])
self.params['oddstack'] = os.path.splitext(self.params['outputstack'])[0]+'.odd | procKeepList | identifier_name |
satAverage.py | ineq = appiondata.ApRefineIterData()
refineq['refineRun'] = refinerundata
refineq['iteration'] = iteration
refinedata = refineq.query(results=1)
if not refinedata:
apDisplay.printError("Could not find refinedata for reconrun id="
+str(reconid)+" iter="+str(iteration))
refinepartq=appiondata.ApRefineParticleData()
refinepartq['refineIter']=refinedata[0]
apDisplay.printMsg("querying particles on "+time.asctime())
refineparticledata = refinepartq.query()
apDisplay.printMsg("saving refineparticledata to cache file")
f = open(cachefile, 'w')
cPickle.dump(refineparticledata, f)
f.close()
apDisplay.printMsg("received "+str(len(refineparticledata))+" particles in "+apDisplay.timeString(time.time()-t0))
return refineparticledata
#=====================
def procKeepList(self):
"""
Removes particles by reading a list of particle numbers generated externally.
Requirements:
the input file has one particle per line
the first piece of data is the particle number from the db
"""
keeplist = []
f = open(self.params['keeplist'], 'r')
lines = f.readlines()
f.close()
for n in lines:
words = n.split()
keeplist.append(int(words[0])+1)
return keeplist
#=====================
def makeClassAverages(self, classlist, outputstack, classdata, maskrad):
#align images in class
#print classlist
images = EMAN.readImages(classlist, -1, -1, 0)
for image in images:
image.rotateAndTranslate()
if image.isFlipped():
image.hFlip()
#make class average
avg = EMAN.EMData()
avg.makeMedian(images)
#write class average
e = EMAN.Euler()
alt = classdata['euler1']*math.pi/180
az = classdata['euler2']*math.pi/180
phi = 0.0 #classdata['euler3']*math.pi/180
e.setAngle(alt, az, phi)
avg.setRAlign(e)
avg.setNImg(len(images))
avg.applyMask(maskrad, 0)
avg.writeImage(outputstack,-1)
#=====================
def determineClasses(self, particles):
"""
Takes refineparticledata and returns a dictionary of classes
"""
apDisplay.printMsg("sorting refineparticledata into classes")
t0 = time.time()
classes={}
class_stats={}
quality=numpy.zeros(len(particles))
for partnum in range(len(particles)):
|
class_stats['meanquality']=quality.mean()
class_stats['stdquality']=quality.std()
class_stats['max']=quality.max()
class_stats['min']=quality.min()
apDisplay.printMsg("sorted %d particles into %d classes"%(len(particles), len(classes)))
### print stats
print "-- quality factor stats --"
print ("mean/std :: "+str(round(class_stats['meanquality'],2))+" +/- "
+str(round(class_stats['stdquality'],2)))
print ("min/max :: "+str(round(class_stats['min'],2))+" <> "
+str(round(class_stats['max'],2)))
apDisplay.printMsg("finished sorting in "+apDisplay.timeString(time.time()-t0))
return classes, class_stats
#=====================
def getClassData(self, reconid, iternum):
t0 = time.time()
cachefile = os.path.join(self.params['rundir'],
"partclassdata-r"+str(reconid)+"-i"+str(iternum)+".cache")
if os.path.isfile(cachefile):
apDisplay.printColor("loading particle class data from cache file", "cyan")
f = open(cachefile, 'r')
classes = cPickle.load(f)
f.close()
else:
apDisplay.printMsg("determine particle class data from database")
particles = self.getParticleInfo(reconid, iternum)
classes, cstats = self.determineClasses(particles)
f = open(cachefile, 'w')
apDisplay.printMsg("saving particle class data to cache file")
cPickle.dump(classes, f)
f.close()
apDisplay.printMsg("received "+str(len(classes))+" classes in "+apDisplay.timeString(time.time()-t0))
return classes
#######################################################
#### ITEMS BELOW CAN BE SPECIFIED IN A NEW PROGRAM ####
#######################################################
#=====================
def setupParserOptions(self):
self.parser.set_usage("Usage: %prog --reconid=<DEF_id> --iter=<iter> --mask=<radius>\n\t "
+"[ --stackname=<name> "
+" --avgjump=<avg> --sigma=<sigma> --eotest ]")
self.parser.add_option("-r", "--reconid", dest="reconid", type="int",
help="Reconstruction run id", metavar="INT")
self.parser.add_option("-m", "--mask", dest="mask", type="int",
help="Mask radius in pixels", metavar="INT")
self.parser.add_option("-i", "--iter", dest="iter", type="int",
help="Final eulers applied to particles will come from this iteration", metavar="INT")
self.parser.add_option("--stackname", dest="stackname", default="goodavgs.hed",
help="Name of the stack to write the averages", metavar="file.hed")
self.parser.add_option("--keep-list", dest="keeplist",
help="Keep particles in the specified text file, EMAN style 0,1,...", metavar="TEXT")
self.parser.add_option("--eotest", dest="eotest", default=False,
action="store_true", help="Perform even/odd test")
#=====================
def checkConflicts(self):
if self.params['reconid'] is None:
apDisplay.printError("enter a reconstruction ID from the database")
if self.params['mask'] is None:
apDisplay.printError("enter a mask radius")
if self.params['iter'] is None:
apDisplay.printError("enter an iteration for the final Eulers")
if self.params['keeplist'] is None:
apDisplay.printError("enter an keep list file")
self.params['keeplist'] = os.path.abspath(self.params['keeplist'])
if not os.path.isfile(self.params['keeplist']):
apDisplay.printError("could not find list file")
self.params['stackid'] = apStack.getStackIdFromRecon(self.params['reconid'])
if self.params['stackname'][-4:] != ".hed":
s = os.path.splitext(self.params['stackname'])[0]
s += ".hed"
self.params['stackname'] = s
apDisplay.printMsg("Stack name: "+self.params['stackname'])
self.params['symmetry'] = apSymmetry.getSymmetryFromReconRunId(self.params['reconid'])
self.params['symmname'] = self.params['symmetry']['eman_name']
#=====================
def setRunDir(self):
refdata = appiondata.ApRefineRunData.direct_query(self.params['reconid'])
if not refdata:
apDisplay.printError("reconid "+str(self.params['reconid'])+" does not exist in the database")
refpath = refdata['path']['path']
rundir = os.path.join(refpath, "../../satEuler/sat-recon%d/volumes"%(self.params['reconid']))
self.params['rundir'] = os.path.abspath(rundir)
#=====================
def start(self):
self.rootname = self.params['stackname'].split(".")[0]
self.params['outputstack'] = os.path.join(self.params['rundir'], self.params['stackname'])
if os.path.isfile(self.params['outputstack']):
apFile.removeStack(self.params['outputstack'])
if self.params['eotest'] is True:
self.params['evenstack'] = os.path.splitext(self.params['outputstack'])[0]+'.even.hed'
if os.path.isfile(self.params['evenstack']):
apFile.removeStack(self.params['evenstack'])
self.params['oddstack'] = os.path.splitext(self.params['outputstack'])[0]+'.odd.h | quality[partnum] = particles[partnum]['quality_factor']
key = ("%.3f_%.3f"%(particles[partnum]['euler1'], particles[partnum]['euler2']))
if key not in classes.keys():
classes[key]={}
classes[key]['particles']=[]
classes[key]['euler1'] = particles[partnum]['euler1']
classes[key]['euler2'] = particles[partnum]['euler2']
#classes have no inplane rotation
classes[key]['euler3'] = 0.0 #particles[partnum]['euler3']
classes[key]['particles'].append(particles[partnum]) | conditional_block |
editor.rs | ()
.map(|(color_id, hex)| (color_id as u8, hex_string_to_rgb(hex).unwrap()));
// Get the xterm color with the minimum Euclidean distance to the target color
// i.e. distance = √ (r2 - r1)² + (g2 - g1)² + (b2 - b1)²
let distances = colors.map(|(color_id, (r, g, b))| {
let r_delta: u32 = (max(r, red) - min(r, red)).into();
let g_delta: u32 = (max(g, green) - min(g, green)).into();
let b_delta: u32 = (max(b, blue) - min(b, blue)).into();
let distance = r_delta.pow(2) + g_delta.pow(2) + b_delta.pow(2);
// don't need to actually take the square root for the sake of comparison
(color_id, distance)
});
Color::Fixed(distances.min_by(|(_, d1), (_, d2)| d1.cmp(d2)).unwrap().0)
}
fn parse_color(json: serde_json::Value) -> Option<Color> {
use serde_json::Value;
match json {
Value::Number(n) => n.as_u64().map(|c| Color::Fixed(c as u8)),
Value::String(s) => match s.to_lowercase().as_str() {
"black" => Some(Color::Black),
"blue" => Some(Color::Blue),
"cyan" => Some(Color::Cyan),
"green" => Some(Color::Green),
"purple" => Some(Color::Purple),
"red" => Some(Color::Red),
"white" => Some(Color::White),
"yellow" => Some(Color::Yellow),
s => {
if let Some((red, green, blue)) = hex_string_to_rgb(s) {
Some(Color::RGB(red, green, blue))
} else {
None
}
}
},
_ => None,
}
}
fn hex_string_to_rgb(s: &str) -> Option<(u8, u8, u8)> {
if s.starts_with('#') && s.len() >= 7 {
if let (Ok(red), Ok(green), Ok(blue)) = (
u8::from_str_radix(&s[1..3], 16),
u8::from_str_radix(&s[3..5], 16),
u8::from_str_radix(&s[5..7], 16),
) {
Some((red, green, blue))
} else {
None
}
} else {
None
}
}
fn style_to_css(style: ansi_term::Style) -> String {
let mut result = "style='".to_string();
if style.is_underline {
write!(&mut result, "text-decoration: underline;").unwrap();
}
if style.is_bold {
write!(&mut result, "font-weight: bold;").unwrap();
}
if style.is_italic {
write!(&mut result, "font-style: italic;").unwrap();
}
if let Some(color) = style.foreground {
write_color(&mut result, color);
}
result.push('\'');
result
}
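// Illustrative output of style_to_css above (assumed example): a bold red
// style yields "style='font-weight: bold;color: red'".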
fn write_color(buffer: &mut String, color: Color) {
if let Color::RGB(r, g, b) = &color {
write!(buffer, "color: #{:x?}{:x?}{:x?}", r, g, b).unwrap()
} else {
write!(
buffer,
"color: {}",
match color {
Color::Black => "black",
Color::Blue => "blue",
Color::Red => "red",
Color::Green => "green",
Color::Yellow => "yellow",
Color::Cyan => "cyan",
Color::Purple => "purple",
Color::White => "white",
Color::Fixed(n) => CSS_STYLES_BY_COLOR_ID[n as usize].as_str(),
Color::RGB(_, _, _) => unreachable!(),
}
)
.unwrap()
}
}
#[derive(Completer, Hinter, Validator)]
pub(crate) struct Editor {
hi: RefCell<Highlighter>,
hi_cfg: HighlightConfiguration,
hi_theme: Theme,
cmd_query: Query,
}
impl Editor {
pub fn new() -> Self {
let lang = tree_sitter_rshcmd::language();
let mut hi_cfg =
HighlightConfiguration::new(lang, tree_sitter_rshcmd::HIGHLIGHTS_QUERY, "", "")
.expect("Could not init tree sitter");
let hi_theme: Theme = Default::default();
hi_cfg.configure(&hi_theme.highlight_names);
Editor {
hi: RefCell::new(Highlighter::new()),
hi_cfg,
hi_theme,
cmd_query: Query::new(lang, r"(cmd_name (identifier) @cmd)")
.expect("error building query"),
}
}
}
struct Styling {
current: Vec<StylingChoice>,
}
struct StylingChoice {
range: Range<usize>,
style: ansi_term::Style,
prio: usize,
}
impl Styling {
fn new(_len: usize) -> Self {
Styling {
// current: vec![(0..len, (ansi_term::Style::new(), 0))],
current: Vec::new(),
}
}
fn insert(&mut self, style: ansi_term::Style, range: Range<usize>, prio: usize) {
self.current.push(StylingChoice { range, style, prio });
}
fn resolve_ranges(&self, len: usize) -> Vec<(Range<usize>, &ansi_term::Style)> {
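// How this works (explanatory note, not in the original): each inserted range
// votes for a style with a priority. The `start` and `end` maps built below
// index the positions where ranges begin and finish; the sweep keeps the
// currently active ranges sorted by priority (highest first) and emits an
// output run, styled by the current winner, each time some range ends.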
struct StyleList<'a> {
backing: Vec<(usize, &'a ansi_term::Style, usize)>,
}
impl<'a> StyleList<'a> {
fn new<I>(i: I) -> Self
where
I: IntoIterator<Item = (usize, &'a ansi_term::Style, usize)>,
{
let mut backing: Vec<_> = i.into_iter().collect();
backing.sort_by(|a, b| b.2.cmp(&a.2));
Self { backing }
}
fn remove(&mut self, idx: usize) {
let i = self
.backing
.iter()
.enumerate()
.find(|(_, s)| s.0 == idx)
.unwrap()
.0;
self.backing.remove(i);
}
fn insert(&mut self, idx: usize, style: &'a ansi_term::Style, prio: usize) {
self.backing.push((idx, style, prio));
self.backing.sort_by(|a, b| b.2.cmp(&a.2));
}
fn current(&self) -> &'a ansi_term::Style {
self.backing[0].1
}
}
if len > 0 {
let mut start = HashMap::new();
let mut end = HashMap::new();
for (i, r) in self.current.iter().enumerate() {
start
.entry(r.range.start)
.or_insert_with(Vec::new)
.push((i, &r.style, r.prio));
end.entry(r.range.end).or_insert_with(Vec::new).push(i);
}
let mut ranges = Vec::new();
let mut rstart = 0;
let mut styles = StyleList::new(start.get(&0).unwrap().iter().copied());
for i in 1..len {
if let Some(ends) = end.get(&i) {
ranges.push((rstart..i, styles.current()));
for idx in ends {
styles.remove(*idx);
}
rstart = i;
}
if let Some(starts) = start.get(&i) {
for (idx, style, prio) in starts {
styles.insert(*idx, style, *prio);
}
}
}
ranges.push((rstart..len, styles.current()));
ranges
} else {
Vec::new()
}
}
fn paint(&self, source: &str) -> String {
let mut s = Vec::new();
for (range, style) in self.resolve_ranges(source.len()) {
style
.paint(&source.as_bytes()[range])
.write_to(&mut s)
.expect("can fail write in string?");
}
String::from_utf8(s).expect("we got UTF-8 in, hi is UTF8")
}
}
impl HiTrait for Editor {
fn highlight<'l>(&self, line: &'l str, _pos: usize) -> std::borrow::Cow<'l, str> {
let mut hi = self.hi.borrow_mut();
let events = hi
.highlight(&self.hi_cfg, line.as_bytes(), None, |_| None)
.expect("hi failed");
let mut stylings = Styling::new(line.len());
let mut style_stack = vec![self.hi_theme.default_style().ansi];
for event in events {
match event.expect("hi failure") {
HighlightEvent::HighlightStart(kind) => {
style_stack.push(self.hi_theme.styles[kind.0].ansi);
}
HighlightEvent::HighlightEnd => { | style_stack.pop(); | random_line_split |
|
editor.rs | {
pub ansi: ansi_term::Style,
pub css: Option<String>,
}
#[derive(Debug)]
pub struct Theme {
pub styles: Vec<Style>,
pub highlight_names: Vec<String>,
}
#[derive(Default, Deserialize, Serialize)]
pub struct ThemeConfig {
#[serde(default)]
pub theme: Theme,
}
impl Theme {
/* pub fn load(path: &Path) -> std::io::Result<Self> {
let json = std::fs::read_to_string(path)?;
Ok(serde_json::from_str(&json).unwrap_or_default())
} */
pub fn default_style(&self) -> Style {
Style::default()
}
}
impl<'de> Deserialize<'de> for Theme {
fn deserialize<D>(deserializer: D) -> std::result::Result<Theme, D::Error>
where
D: Deserializer<'de>,
{
let mut styles = Vec::new();
let mut highlight_names = Vec::new();
if let Ok(colors) = HashMap::<String, serde_json::Value>::deserialize(deserializer) {
highlight_names.reserve(colors.len());
styles.reserve(colors.len());
for (name, style_value) in colors {
let mut style = Style::default();
parse_style(&mut style, style_value);
highlight_names.push(name);
styles.push(style);
}
}
Ok(Self {
styles,
highlight_names,
})
}
}
impl Serialize for Theme {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
let mut map = serializer.serialize_map(Some(self.styles.len()))?;
for (name, style) in self.highlight_names.iter().zip(&self.styles) {
let style = &style.ansi;
let color = style.foreground.map(|color| match color {
Color::Black => json!("black"),
Color::Blue => json!("blue"),
Color::Cyan => json!("cyan"),
Color::Green => json!("green"),
Color::Purple => json!("purple"),
Color::Red => json!("red"),
Color::White => json!("white"),
Color::Yellow => json!("yellow"),
Color::RGB(r, g, b) => json!(format!("#{:x?}{:x?}{:x?}", r, g, b)),
Color::Fixed(n) => json!(n),
});
if style.is_bold || style.is_italic || style.is_underline {
let mut style_json = HashMap::new();
if let Some(color) = color {
style_json.insert("color", color);
}
if style.is_bold {
style_json.insert("bold", serde_json::Value::Bool(true));
}
if style.is_italic {
style_json.insert("italic", serde_json::Value::Bool(true));
}
if style.is_underline {
style_json.insert("underline", serde_json::Value::Bool(true));
}
map.serialize_entry(&name, &style_json)?;
} else if let Some(color) = color {
map.serialize_entry(&name, &color)?;
} else {
map.serialize_entry(&name, &serde_json::Value::Null)?;
}
}
map.end()
}
}
impl Default for Theme {
fn default() -> Self {
serde_json::from_str(
r#"
{
"attribute": {"color": 124, "italic": true},
"comment": {"color": 245, "italic": true},
"constant.builtin": {"color": 94, "bold": true},
"constant": 94,
"constructor": 136,
"embedded": null,
"function.builtin": {"color": 26, "bold": true},
"function": 26,
"keyword": 56,
"number": {"color": 94, "bold": true},
"property": 124,
"operator": {"color": 239, "bold": true},
"punctuation.bracket": 239,
"punctuation.delimiter": 239,
"string.special": 30,
"string": 28,
"tag": 18,
"type": 23,
"type.builtin": {"color": 23, "bold": true},
"variable.builtin": {"bold": true},
"variable.parameter": {"underline": true}
}
"#,
)
.unwrap()
}
}
fn parse_style(style: &mut Style, json: serde_json::Value) {
use serde_json::Value;
if let Value::Object(entries) = json {
for (property_name, value) in entries {
match property_name.as_str() {
"bold" => {
if value == Value::Bool(true) {
style.ansi = style.ansi.bold()
}
}
"italic" => {
if value == Value::Bool(true) {
style.ansi = style.ansi.italic()
}
}
"underline" => {
if value == Value::Bool(true) {
style.ansi = style.ansi.underline()
}
}
"color" => {
if let Some(color) = parse_color(value) {
style.ansi = style.ansi.fg(color);
}
}
_ => {}
}
}
style.css = Some(style_to_css(style.ansi));
} else if let Some(color) = parse_color(json) {
style.ansi = style.ansi.fg(color);
style.css = Some(style_to_css(style.ansi));
} else {
style.css = None;
}
if let Some(Color::RGB(red, green, blue)) = style.ansi.foreground {
if !terminal_supports_truecolor() {
style.ansi = style.ansi.fg(closest_xterm_color(red, green, blue));
}
}
}
fn terminal_supports_truecolor() -> bool {
use std::env;
if let Ok(truecolor) = env::var("COLORTERM") {
truecolor == "truecolor" || truecolor == "24bit"
} else {
false
}
}
fn closest_xterm_color(red: u8, green: u8, blue: u8) -> Color {
use std::cmp::{max, min};
let colors = CSS_STYLES_BY_COLOR_ID
.iter()
.enumerate()
.map(|(color_id, hex)| (color_id as u8, hex_string_to_rgb(hex).unwrap()));
// Get the xterm color with the minimum Euclidean distance to the target color
// i.e. distance = √ (r2 - r1)² + (g2 - g1)² + (b2 - b1)²
let distances = colors.map(|(color_id, (r, g, b))| {
let r_delta: u32 = (max(r, red) - min(r, red)).into();
let g_delta: u32 = (max(g, green) - min(g, green)).into();
let b_delta: u32 = (max(b, blue) - min(b, blue)).into();
let distance = r_delta.pow(2) + g_delta.pow(2) + b_delta.pow(2);
// don't need to actually take the square root for the sake of comparison
(color_id, distance)
});
Color::Fixed(distances.min_by(|(_, d1), (_, d2)| d1.cmp(d2)).unwrap().0)
}
fn parse_color(json: serde_json::Value) -> Option<Color> {
use serde_json::Value;
match json {
Value::Number(n) => n.as_u64().map(|c| Color::Fixed(c as u8)),
Value::String(s) => match s.to_lowercase().as_str() {
"black" => Some(Color::Black),
"blue" => Some(Color::Blue),
"cyan" => Some(Color::Cyan),
"green" => Some(Color::Green),
"purple" => Some(Color::Purple),
"red" => Some(Color::Red),
"white" => Some(Color::White),
"yellow" => Some(Color::Yellow),
s => {
if let Some((red, green, blue)) = hex_string_to_rgb(s) {
Some(Color::RGB(red, green, blue))
} else {
None
}
}
},
_ => None,
}
}
fn hex_string_to_rgb(s: &str) -> Option<(u8, u8, u8)> {
if s.starts_with('#') && s.len() >= 7 {
if let (Ok(red), Ok(green), Ok(blue)) = (
u8::from_str_radix(&s[1..3], 16),
u8::from_str_radix(&s[3..5], 16),
u8::from_str_radix(&s[5..7], 16),
) {
Some((red, green, blue))
} else {
None
}
} else {
None
}
}
fn style_to_css(style: ansi_term::Style) -> String {
let mut result = "style='".to_string();
if style.is_underline {
write!(&mut result, "text-decoration: underline;").unwrap();
}
| Style | identifier_name |
|
editor.rs | ": 239,
"punctuation.delimiter": 239,
"string.special": 30,
"string": 28,
"tag": 18,
"type": 23,
"type.builtin": {"color": 23, "bold": true},
"variable.builtin": {"bold": true},
"variable.parameter": {"underline": true}
}
"#,
)
.unwrap()
}
}
fn parse_style(style: &mut Style, json: serde_json::Value) {
use serde_json::Value;
if let Value::Object(entries) = json {
for (property_name, value) in entries {
match property_name.as_str() {
"bold" => {
if value == Value::Bool(true) {
style.ansi = style.ansi.bold()
}
}
"italic" => {
if value == Value::Bool(true) {
style.ansi = style.ansi.italic()
}
}
"underline" => {
if value == Value::Bool(true) {
style.ansi = style.ansi.underline()
}
}
"color" => {
if let Some(color) = parse_color(value) {
style.ansi = style.ansi.fg(color);
}
}
_ => {}
}
}
style.css = Some(style_to_css(style.ansi));
} else if let Some(color) = parse_color(json) {
style.ansi = style.ansi.fg(color);
style.css = Some(style_to_css(style.ansi));
} else {
style.css = None;
}
if let Some(Color::RGB(red, green, blue)) = style.ansi.foreground {
if !terminal_supports_truecolor() {
style.ansi = style.ansi.fg(closest_xterm_color(red, green, blue));
}
}
}
fn terminal_supports_truecolor() -> bool {
use std::env;
if let Ok(truecolor) = env::var("COLORTERM") {
truecolor == "truecolor" || truecolor == "24bit"
} else {
false
}
}
fn closest_xterm_color(red: u8, green: u8, blue: u8) -> Color {
use std::cmp::{max, min};
let colors = CSS_STYLES_BY_COLOR_ID
.iter()
.enumerate()
.map(|(color_id, hex)| (color_id as u8, hex_string_to_rgb(hex).unwrap()));
// Get the xterm color with the minimum Euclidean distance to the target color
// i.e. distance = √ (r2 - r1)² + (g2 - g1)² + (b2 - b1)²
let distances = colors.map(|(color_id, (r, g, b))| {
let r_delta: u32 = (max(r, red) - min(r, red)).into();
let g_delta: u32 = (max(g, green) - min(g, green)).into();
let b_delta: u32 = (max(b, blue) - min(b, blue)).into();
let distance = r_delta.pow(2) + g_delta.pow(2) + b_delta.pow(2);
// don't need to actually take the square root for the sake of comparison
(color_id, distance)
});
Color::Fixed(distances.min_by(|(_, d1), (_, d2)| d1.cmp(d2)).unwrap().0)
}
fn parse_color(json: serde_json::Value) -> Option<Color> {
use serde_json::Value;
match json {
Value::Number(n) => n.as_u64().map(|c| Color::Fixed(c as u8)),
Value::String(s) => match s.to_lowercase().as_str() {
"black" => Some(Color::Black),
"blue" => Some(Color::Blue),
"cyan" => Some(Color::Cyan),
"green" => Some(Color::Green),
"purple" => Some(Color::Purple),
"red" => Some(Color::Red),
"white" => Some(Color::White),
"yellow" => Some(Color::Yellow),
s => {
if let Some((red, green, blue)) = hex_string_to_rgb(s) {
Some(Color::RGB(red, green, blue))
} else {
None
}
}
},
_ => None,
}
}
fn hex_string_to_rgb(s: &str) -> Option<(u8, u8, u8)> {
if s.starts_with('#') && s.len() >= 7 {
if let (Ok(red), Ok(green), Ok(blue)) = (
u8::from_str_radix(&s[1..3], 16),
u8::from_str_radix(&s[3..5], 16),
u8::from_str_radix(&s[5..7], 16),
) {
Some((red, green, blue))
} else {
None
}
} else {
None
}
}
fn style_to_css(style: ansi_term::Style) -> String {
let mut result = "style='".to_string();
if style.is_underline {
write!(&mut result, "text-decoration: underline;").unwrap();
}
if style.is_bold {
write!(&mut result, "font-weight: bold;").unwrap();
}
if style.is_italic {
write!(&mut result, "font-style: italic;").unwrap();
}
if let Some(color) = style.foreground {
write_color(&mut result, color);
}
result.push('\'');
result
}
fn write_color(buffer: &mut String, color: Color) {
if let Color::RGB(r, g, b) = &color {
write!(buffer, "color: #{:x?}{:x?}{:x?}", r, g, b).unwrap()
} else {
write!(
buffer,
"color: {}",
match color {
Color::Black => "black",
Color::Blue => "blue",
Color::Red => "red",
Color::Green => "green",
Color::Yellow => "yellow",
Color::Cyan => "cyan",
Color::Purple => "purple",
Color::White => "white",
Color::Fixed(n) => CSS_STYLES_BY_COLOR_ID[n as usize].as_str(),
Color::RGB(_, _, _) => unreachable!(),
}
)
.unwrap()
}
}
#[derive(Completer, Hinter, Validator)]
pub(crate) struct Editor {
hi: RefCell<Highlighter>,
hi_cfg: HighlightConfiguration,
hi_theme: Theme,
cmd_query: Query,
}
impl Editor {
pub fn new() -> Self {
let lang = tree_sitter_rshcmd::language();
let mut hi_cfg =
HighlightConfiguration::new(lang, tree_sitter_rshcmd::HIGHLIGHTS_QUERY, "", "")
.expect("Could not init tree sitter");
let hi_theme: Theme = Default::default();
hi_cfg.configure(&hi_theme.highlight_names);
Editor {
hi: RefCell::new(Highlighter::new()),
hi_cfg,
hi_theme,
cmd_query: Query::new(lang, r"(cmd_name (identifier) @cmd)")
.expect("error building query"),
}
}
}
struct Styling {
current: Vec<StylingChoice>,
}
struct StylingChoice {
range: Range<usize>,
style: ansi_term::Style,
prio: usize,
}
impl Styling {
fn new(_len: usize) -> Self {
Styling {
// current: vec![(0..len, (ansi_term::Style::new(), 0))],
current: Vec::new(),
}
}
fn insert(&mut self, style: ansi_term::Style, range: Range<usize>, prio: usize) {
self.current.push(StylingChoice { range, style, prio });
}
fn resolve_ranges(&self, len: usize) -> Vec<(Range<usize>, &ansi_term::Style)> {
struct StyleList<'a> {
backing: Vec<(usize, &'a ansi_term::Style, usize)>,
}
impl<'a> StyleList<'a> {
fn new<I>(i: I) -> Self
where
I: IntoIterator<Item = (usize, &'a ansi_term::Style, usize)>,
{
let mut backing: Vec<_> = i.into_iter().collect();
backing.sort_by(|a, b| b.2.cmp(&a.2));
Self { backing }
}
fn remove(&mut self, idx: usize) {
let i = self
.backing
.iter()
.enumerate()
.find(|(_, s)| s.0 == idx)
.unwrap()
.0;
self.backing.remove(i);
}
fn insert(&mut self, idx: usize, style: &'a ansi_term::Style, prio: usize) {
self.backing.push((idx, style, prio));
self.backing.sort_by(|a, b| b.2.cmp(&a.2));
}
fn current(&self) -> &'a ansi_term::Style {
| self.backing[0].1
}
| identifier_body |
|
calendar_appointment.py | ][0] - tolerance:
return -1
if end_dt >= intervals[-1][1] + tolerance:
return -1
return recursive_find_index(0, len(intervals) - 1)
if not intervals:
return False
tolerance = timedelta(minutes=1)
start_index = find_start_index()
if start_index != -1:
for index in range(start_index, len(intervals)):
if intervals[index][1] >= end_dt - tolerance:
return True
if len(intervals) == index + 1 or intervals[index + 1][0] - intervals[index][1] > tolerance:
return False
return False
def is_calendar_available(slot, events, employee):
""" Returns True if the given slot doesn't collide with given events for the employee
"""
start_dt = slot['UTC'][0]
end_dt = slot['UTC'][1]
event_in_scope = lambda ev: (
fields.Date.to_date(ev.start) <= fields.Date.to_date(end_dt)
and fields.Date.to_date(ev.stop) >= fields.Date.to_date(start_dt)
)
for ev in events.filtered(event_in_scope):
if ev.allday:
# allday events are considered to take the whole day in the related employee's timezone
event_tz = pytz.timezone(ev.event_tz or employee.user_id.tz or self.env.user.tz or slot['slot'].appointment_type_id.appointment_tz or 'UTC')
ev_start_dt = datetime.combine(fields.Date.from_string(ev.start_date), time.min)
ev_stop_dt = datetime.combine(fields.Date.from_string(ev.stop_date), time.max)
ev_start_dt = event_tz.localize(ev_start_dt).astimezone(pytz.UTC).replace(tzinfo=None)
ev_stop_dt = event_tz.localize(ev_stop_dt).astimezone(pytz.UTC).replace(tzinfo=None)
if ev_start_dt < end_dt and ev_stop_dt > start_dt:
return False
elif fields.Datetime.to_datetime(ev.start) < end_dt and fields.Datetime.to_datetime(ev.stop) > start_dt:
return False
return True
workhours = {}
meetings = {}
# With context will be used in resource.calendar to force the referential user
# for work interval computing to the *user linked to the employee*
available_employees = [emp.with_context(tz=emp.user_id.tz) for emp in (employee or self.employee_ids)]
random.shuffle(available_employees)
for slot in slots:
for emp_pos, emp in enumerate(available_employees):
if emp_pos not in workhours:
workhours[emp_pos] = [
(interval[0].astimezone(pytz.UTC).replace(tzinfo=None),
interval[1].astimezone(pytz.UTC).replace(tzinfo=None))
for interval in emp.resource_calendar_id._work_intervals_batch(
first_day, last_day, resources=emp.resource_id,
)[emp.resource_id.id]
]
if is_work_available(slot['UTC'][0], slot['UTC'][1], workhours[emp_pos]):
if emp_pos not in meetings:
# note: no check is made on the attendee's status (accepted/declined/...)
meetings[emp_pos] = self.env['calendar.event'].search([
('partner_ids.user_ids', '=', emp.user_id.id),
('start', '<', fields.Datetime.to_string(last_day.replace(hour=23, minute=59, second=59))),
('stop', '>', fields.Datetime.to_string(first_day.replace(hour=0, minute=0, second=0)))
])
if is_calendar_available(slot, meetings[emp_pos], emp):
slot['employee_id'] = emp
break
def _get_appointment_slots(self, timezone, employee=None):
""" Fetch available slots to book an appointment
:param timezone: timezone string e.g.: 'Europe/Brussels' or 'Etc/GMT+1'
:param employee: if set will only check available slots for this employee
:returns: list of dicts (1 per month) containing available slots per day per week.
a complex structure used to simplify rendering of the template
"""
self.ensure_one()
appt_tz = pytz.timezone(self.appointment_tz)
requested_tz = pytz.timezone(timezone)
first_day = requested_tz.fromutc(datetime.utcnow() + relativedelta(hours=self.min_schedule_hours))
last_day = requested_tz.fromutc(datetime.utcnow() + relativedelta(days=self.max_schedule_days))
# Compute available slots (ordered)
slots = self._slots_generate(first_day.astimezone(appt_tz), last_day.astimezone(appt_tz), timezone)
if not employee or employee in self.employee_ids:
self._slots_available(slots, first_day.astimezone(pytz.UTC), last_day.astimezone(pytz.UTC), employee)
# Compute calendar rendering and inject available slots
today = requested_tz.fromutc(datetime.utcnow())
start = today
month_dates_calendar = cal.Calendar(0).monthdatescalendar
months = []
while (start.year, start.month) <= (last_day.year, last_day.month):
dates = month_dates_calendar(start.year, start.month)
for week_index, week in enumerate(dates):
for day_index, day in enumerate(week):
mute_cls = weekend_cls = today_cls = None
today_slots = []
if day.weekday() in (cal.SUNDAY, cal.SATURDAY):
weekend_cls = 'o_weekend'
if day == today.date() and day.month == today.month:
today_cls = 'o_today'
if day.month != start.month:
mute_cls = 'text-muted o_mute_day'
else:
# slots are ordered, so check all unprocessed slots from until > day
while slots and (slots[0][timezone][0].date() <= day):
if (slots[0][timezone][0].date() == day) and ('employee_id' in slots[0]):
today_slots.append({
'employee_id': slots[0]['employee_id'].id,
'datetime': slots[0][timezone][0].strftime('%Y-%m-%d %H:%M:%S'),
'hours': slots[0][timezone][0].strftime('%H:%M')
})
slots.pop(0)
dates[week_index][day_index] = {
'day': day,
'slots': today_slots,
'mute_cls': mute_cls,
'weekend_cls': weekend_cls,
'today_cls': today_cls
}
months.append({
'month': format_datetime(start, 'MMMM Y', locale=get_lang(self.env).code),
'weeks': dates
})
start = start + relativedelta(months=1)
return months
class CalendarAppointmentSlot(models.Model):
_name = "calendar.appointment.slot"
_description = "Online Appointment : Time Slot"
_rec_name = "weekday"
_order = "weekday, hour"
appointment_type_id = fields.Many2one('calendar.appointment.type', 'Appointment Type', ondelete='cascade')
weekday = fields.Selection([
('1', 'Monday'),
('2', 'Tuesday'),
('3', 'Wednesday'),
('4', 'Thursday'),
('5', 'Friday'),
('6', 'Saturday'),
('7', 'Sunday'),
], string='Week Day', required=True)
hour = fields.Float('Starting Hour', required=True, default=8.0)
@api.constrains('hour')
def check_hour(self):
if any(slot.hour < 0.00 or slot.hour >= 24.00 for slot in self):
raise ValidationError(_("Please enter a valid hour between 0:00 and 24:00 for your slots."))
def name_get(self):
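# Illustrative: a slot with weekday='1' and hour=8.5 is rendered as
# "Monday, 08:30" by the format string below.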
weekdays = dict(self._fields['weekday'].selection)
return self.mapped(lambda slot: (slot.id, "%s, %02d:%02d" % (weekdays.get(slot.weekday), int(slot.hour), int(round((slot.hour % 1) * 60)))))
class CalendarAppointmentQuestion(models.Model):
_name = "calendar.appointment.question"
_description = "Online Appointment : Questions"
_order = "sequence"
sequence = fields.Integer('Sequence')
appointment_type_id = fields.Many2one('calendar.appointment.type', 'Appointment Type', ondelete="cascade")
name = fields.Char('Question', translate=True, required=True)
placeholder = fields.Char('Placeholder', translate=True)
question_required = fields.Boolean('Required Answer')
question_type = fields.Selection([
('char', 'Single line text'),
('text', 'Multi-line text'),
('select', 'Dropdown (one answer)'),
('radio', 'Radio (one answer)'),
('checkbox', 'Checkboxes (multiple answers)')], 'Question Type', default='char')
answer_ids = fields.Many2many('calendar.appointment.answer', 'calendar_appointment_question_answer_rel', 'question_id', 'answer_id', string='Available Answers')
class CalendarAppointmentAnswer(models.Model):
| _name = "calendar.appointment.answer"
_description = "Online Appointment : Answers"
question_id = fields.Many2many('calendar.appointment.question', 'calendar_appointment_question_answer_rel', 'answer_id', 'question_id', string='Questions')
name = fields.Char('Answer', translate=True, required=True) | identifier_body |
|
calendar_appointment.py | help="Timezone where appointment take place")
employee_ids = fields.Many2many('hr.employee', 'website_calendar_type_employee_rel', domain=[('user_id', '!=', False)], string='Employees')
assignation_method = fields.Selection([
('random', 'Random'),
('chosen', 'Chosen by the Customer')], string='Assignment Method', default='random',
help="How employees will be assigned to meetings customers book on your website.")
appointment_count = fields.Integer('# Appointments', compute='_compute_appointment_count')
def _compute_appointment_count(self):
meeting_data = self.env['calendar.event'].read_group([('appointment_type_id', 'in', self.ids)], ['appointment_type_id'], ['appointment_type_id'])
mapped_data = {m['appointment_type_id'][0]: m['appointment_type_id_count'] for m in meeting_data}
for appointment_type in self:
appointment_type.appointment_count = mapped_data.get(appointment_type.id, 0)
def _compute_website_url(self):
super(CalendarAppointmentType, self)._compute_website_url()
for appointment_type in self:
if appointment_type.id :
appointment_type.website_url = '/calendar/%s/appointment' % (slug(appointment_type),)
@api.returns('self', lambda value: value.id)
def copy(self, default=None):
default = default or {}
default['name'] = self.name + _(' (copy)')
return super(CalendarAppointmentType, self).copy(default=default)
def action_calendar_meetings(self):
self.ensure_one()
action = self.env["ir.actions.actions"]._for_xml_id("calendar.action_calendar_event")
action['context'] = {
'default_appointment_type_id': self.id,
'search_default_appointment_type_id': self.id
}
return action
# --------------------------------------
# Slots Generation
# --------------------------------------
def _slots_generate(self, first_day, last_day, timezone):
""" Generate all appointment slots (in naive UTC, appointment timezone, and given (visitors) timezone)
between first_day and last_day (datetimes in appointment timezone)
:return: [ {'slot': slot_record, <timezone>: (date_start, date_end), ...},
... ]
"""
def append_slot(day, slot):
local_start = appt_tz.localize(datetime.combine(day, time(hour=int(slot.hour), minute=int(round((slot.hour % 1) * 60)))))
local_end = appt_tz.localize(
datetime.combine(day, time(hour=int(slot.hour), minute=int(round((slot.hour % 1) * 60)))) + relativedelta(hours=self.appointment_duration))
slots.append({
self.appointment_tz: (
local_start,
local_end,
),
timezone: (
local_start.astimezone(requested_tz),
local_end.astimezone(requested_tz),
),
'UTC': (
local_start.astimezone(pytz.UTC).replace(tzinfo=None),
local_end.astimezone(pytz.UTC).replace(tzinfo=None),
),
'slot': slot,
})
appt_tz = pytz.timezone(self.appointment_tz)
requested_tz = pytz.timezone(timezone)
slots = []
for slot in self.slot_ids.filtered(lambda x: int(x.weekday) == first_day.isoweekday()):
|
slot_weekday = [int(weekday) - 1 for weekday in self.slot_ids.mapped('weekday')]
for day in rrule.rrule(rrule.DAILY,
dtstart=first_day.date() + timedelta(days=1),
until=last_day.date(),
byweekday=slot_weekday):
for slot in self.slot_ids.filtered(lambda x: int(x.weekday) == day.isoweekday()):
append_slot(day, slot)
return slots
def _slots_available(self, slots, first_day, last_day, employee=None):
""" Fills the slot stucture with an available employee
:param slots: slots structure generated by _slots_generate
:param first_day: start datetime in UTC
:param last_day: end datetime in UTC
:param employee: if set, only consider this employee
if not set, consider all employees assigned to this appointment type
"""
def is_work_available(start_dt, end_dt, intervals):
""" check if the slot is contained in the employee's work hours (defined by intervals)
"""
def find_start_index():
""" find the highest index of intervals for which the start_date (element [0]) is before (or at) start_dt
"""
def recursive_find_index(lower_bound, upper_bound):
if upper_bound - lower_bound <= 1:
if intervals[upper_bound][0] <= start_dt:
return upper_bound
return lower_bound
index = (upper_bound + lower_bound) // 2
if intervals[index][0] <= start_dt:
return recursive_find_index(index, upper_bound)
else:
return recursive_find_index(lower_bound, index)
if start_dt <= intervals[0][0] - tolerance:
return -1
if end_dt >= intervals[-1][1] + tolerance:
return -1
return recursive_find_index(0, len(intervals) - 1)
if not intervals:
return False
tolerance = timedelta(minutes=1)
start_index = find_start_index()
if start_index != -1:
for index in range(start_index, len(intervals)):
if intervals[index][1] >= end_dt - tolerance:
return True
if len(intervals) == index + 1 or intervals[index + 1][0] - intervals[index][1] > tolerance:
return False
return False
def is_calendar_available(slot, events, employee):
""" Returns True if the given slot doesn't collide with given events for the employee
"""
start_dt = slot['UTC'][0]
end_dt = slot['UTC'][1]
event_in_scope = lambda ev: (
fields.Date.to_date(ev.start) <= fields.Date.to_date(end_dt)
and fields.Date.to_date(ev.stop) >= fields.Date.to_date(start_dt)
)
for ev in events.filtered(event_in_scope):
if ev.allday:
# allday events are considered to take the whole day in the related employee's timezone
event_tz = pytz.timezone(ev.event_tz or employee.user_id.tz or self.env.user.tz or slot['slot'].appointment_type_id.appointment_tz or 'UTC')
ev_start_dt = datetime.combine(fields.Date.from_string(ev.start_date), time.min)
ev_stop_dt = datetime.combine(fields.Date.from_string(ev.stop_date), time.max)
ev_start_dt = event_tz.localize(ev_start_dt).astimezone(pytz.UTC).replace(tzinfo=None)
ev_stop_dt = event_tz.localize(ev_stop_dt).astimezone(pytz.UTC).replace(tzinfo=None)
if ev_start_dt < end_dt and ev_stop_dt > start_dt:
return False
elif fields.Datetime.to_datetime(ev.start) < end_dt and fields.Datetime.to_datetime(ev.stop) > start_dt:
return False
return True
workhours = {}
meetings = {}
# With context will be used in resource.calendar to force the referential user
# for work interval computing to the *user linked to the employee*
available_employees = [emp.with_context(tz=emp.user_id.tz) for emp in (employee or self.employee_ids)]
random.shuffle(available_employees)
for slot in slots:
for emp_pos, emp in enumerate(available_employees):
if emp_pos not in workhours:
workhours[emp_pos] = [
(interval[0].astimezone(pytz.UTC).replace(tzinfo=None),
interval[1].astimezone(pytz.UTC).replace(tzinfo=None))
for interval in emp.resource_calendar_id._work_intervals_batch(
first_day, last_day, resources=emp.resource_id,
)[emp.resource_id.id]
]
if is_work_available(slot['UTC'][0], slot['UTC'][1], workhours[emp_pos]):
if emp_pos not in meetings:
# note: no check is made on the attendee's status (accepted/declined/...)
meetings[emp_pos] = self.env['calendar.event'].search([
('partner_ids.user_ids', '=', emp.user_id.id),
('start', '<', fields.Datetime.to_string(last_day.replace(hour=23, minute=59, second=59))),
('stop', '>', fields.Datetime.to_string(first_day.replace(hour=0, minute=0, second=0)))
])
if is_calendar_available(slot, meetings[emp_pos], emp):
slot['employee_id'] = emp
break
def _get_appointment_slots(self, timezone, employee=None):
""" Fetch available slots to book an appointment
:param timezone: timezone string e.g.: 'Europe/Brussels' or 'Etc/GMT+1'
:param employee: if set will only check available slots for this employee
:returns: list of dicts (1 per month) containing available slots per day per week.
a complex structure used to simplify rendering of the template
"""
self.ensure_one()
appt_tz = pytz.timezone(self.appointment_tz)
requested_t | if slot.hour > first_day.hour + first_day.minute / 60.0:
append_slot(first_day.date(), slot) | conditional_block |
calendar_appointment.py | ,
help="Timezone where appointment take place")
employee_ids = fields.Many2many('hr.employee', 'website_calendar_type_employee_rel', domain=[('user_id', '!=', False)], string='Employees')
assignation_method = fields.Selection([
('random', 'Random'),
('chosen', 'Chosen by the Customer')], string='Assignment Method', default='random',
help="How employees will be assigned to meetings customers book on your website.")
appointment_count = fields.Integer('# Appointments', compute='_compute_appointment_count')
def _compute_appointment_count(self):
meeting_data = self.env['calendar.event'].read_group([('appointment_type_id', 'in', self.ids)], ['appointment_type_id'], ['appointment_type_id'])
mapped_data = {m['appointment_type_id'][0]: m['appointment_type_id_count'] for m in meeting_data}
for appointment_type in self:
appointment_type.appointment_count = mapped_data.get(appointment_type.id, 0)
def _compute_website_url(self):
super(CalendarAppointmentType, self)._compute_website_url()
for appointment_type in self:
if appointment_type.id :
appointment_type.website_url = '/calendar/%s/appointment' % (slug(appointment_type),)
@api.returns('self', lambda value: value.id)
def copy(self, default=None):
default = default or {}
default['name'] = self.name + _(' (copy)')
return super(CalendarAppointmentType, self).copy(default=default)
def action_calendar_meetings(self):
self.ensure_one()
action = self.env["ir.actions.actions"]._for_xml_id("calendar.action_calendar_event")
action['context'] = {
'default_appointment_type_id': self.id,
'search_default_appointment_type_id': self.id
}
return action
# --------------------------------------
# Slots Generation
# --------------------------------------
def _slots_generate(self, first_day, last_day, timezone):
""" Generate all appointment slots (in naive UTC, appointment timezone, and given (visitors) timezone)
between first_day and last_day (datetimes in appointment timezone)
:return: [ {'slot': slot_record, <timezone>: (date_start, date_end), ...},
... ]
"""
def append_slot(day, slot):
local_start = appt_tz.localize(datetime.combine(day, time(hour=int(slot.hour), minute=int(round((slot.hour % 1) * 60)))))
local_end = appt_tz.localize(
datetime.combine(day, time(hour=int(slot.hour), minute=int(round((slot.hour % 1) * 60)))) + relativedelta(hours=self.appointment_duration))
slots.append({
self.appointment_tz: (
local_start,
local_end,
),
timezone: (
local_start.astimezone(requested_tz),
local_end.astimezone(requested_tz),
),
'UTC': (
local_start.astimezone(pytz.UTC).replace(tzinfo=None),
local_end.astimezone(pytz.UTC).replace(tzinfo=None),
),
'slot': slot,
})
appt_tz = pytz.timezone(self.appointment_tz)
requested_tz = pytz.timezone(timezone)
slots = []
for slot in self.slot_ids.filtered(lambda x: int(x.weekday) == first_day.isoweekday()):
if slot.hour > first_day.hour + first_day.minute / 60.0:
append_slot(first_day.date(), slot)
slot_weekday = [int(weekday) - 1 for weekday in self.slot_ids.mapped('weekday')]
for day in rrule.rrule(rrule.DAILY,
dtstart=first_day.date() + timedelta(days=1),
until=last_day.date(),
byweekday=slot_weekday):
for slot in self.slot_ids.filtered(lambda x: int(x.weekday) == day.isoweekday()):
append_slot(day, slot)
return slots
def | (self, slots, first_day, last_day, employee=None):
""" Fills the slot stucture with an available employee
:param slots: slots structure generated by _slots_generate
:param first_day: start datetime in UTC
:param last_day: end datetime in UTC
:param employee: if set, only consider this employee
if not set, consider all employees assigned to this appointment type
"""
def is_work_available(start_dt, end_dt, intervals):
""" check if the slot is contained in the employee's work hours (defined by intervals)
"""
def find_start_index():
""" find the highest index of intervals for which the start_date (element [0]) is before (or at) start_dt
"""
def recursive_find_index(lower_bound, upper_bound):
if upper_bound - lower_bound <= 1:
if intervals[upper_bound][0] <= start_dt:
return upper_bound
return lower_bound
index = (upper_bound + lower_bound) // 2
if intervals[index][0] <= start_dt:
return recursive_find_index(index, upper_bound)
else:
return recursive_find_index(lower_bound, index)
if start_dt <= intervals[0][0] - tolerance:
return -1
if end_dt >= intervals[-1][1] + tolerance:
return -1
return recursive_find_index(0, len(intervals) - 1)
if not intervals:
return False
tolerance = timedelta(minutes=1)
start_index = find_start_index()
if start_index != -1:
for index in range(start_index, len(intervals)):
if intervals[index][1] >= end_dt - tolerance:
return True
if len(intervals) == index + 1 or intervals[index + 1][0] - intervals[index][1] > tolerance:
return False
return False
def is_calendar_available(slot, events, employee):
""" Returns True if the given slot doesn't collide with given events for the employee
"""
start_dt = slot['UTC'][0]
end_dt = slot['UTC'][1]
event_in_scope = lambda ev: (
fields.Date.to_date(ev.start) <= fields.Date.to_date(end_dt)
and fields.Date.to_date(ev.stop) >= fields.Date.to_date(start_dt)
)
for ev in events.filtered(event_in_scope):
if ev.allday:
# allday events are considered to take the whole day in the related employee's timezone
event_tz = pytz.timezone(ev.event_tz or employee.user_id.tz or self.env.user.tz or slot['slot'].appointment_type_id.appointment_tz or 'UTC')
ev_start_dt = datetime.combine(fields.Date.from_string(ev.start_date), time.min)
ev_stop_dt = datetime.combine(fields.Date.from_string(ev.stop_date), time.max)
ev_start_dt = event_tz.localize(ev_start_dt).astimezone(pytz.UTC).replace(tzinfo=None)
ev_stop_dt = event_tz.localize(ev_stop_dt).astimezone(pytz.UTC).replace(tzinfo=None)
if ev_start_dt < end_dt and ev_stop_dt > start_dt:
return False
elif fields.Datetime.to_datetime(ev.start) < end_dt and fields.Datetime.to_datetime(ev.stop) > start_dt:
return False
return True
workhours = {}
meetings = {}
# With context will be used in resource.calendar to force the referential user
# for work interval computing to the *user linked to the employee*
available_employees = [emp.with_context(tz=emp.user_id.tz) for emp in (employee or self.employee_ids)]
random.shuffle(available_employees)
for slot in slots:
for emp_pos, emp in enumerate(available_employees):
if emp_pos not in workhours:
workhours[emp_pos] = [
(interval[0].astimezone(pytz.UTC).replace(tzinfo=None),
interval[1].astimezone(pytz.UTC).replace(tzinfo=None))
for interval in emp.resource_calendar_id._work_intervals_batch(
first_day, last_day, resources=emp.resource_id,
)[emp.resource_id.id]
]
if is_work_available(slot['UTC'][0], slot['UTC'][1], workhours[emp_pos]):
if emp_pos not in meetings:
# note: no check is made on the attendee's status (accepted/declined/...)
meetings[emp_pos] = self.env['calendar.event'].search([
('partner_ids.user_ids', '=', emp.user_id.id),
('start', '<', fields.Datetime.to_string(last_day.replace(hour=23, minute=59, second=59))),
('stop', '>', fields.Datetime.to_string(first_day.replace(hour=0, minute=0, second=0)))
])
if is_calendar_available(slot, meetings[emp_pos], emp):
slot['employee_id'] = emp
break
def _get_appointment_slots(self, timezone, employee=None):
""" Fetch available slots to book an appointment
:param timezone: timezone string e.g.: 'Europe/Brussels' or 'Etc/GMT+1'
:param employee: if set will only check available slots for this employee
:returns: list of dicts (1 per month) containing available slots per day per week.
complex structure used to simplify rendering of template
"""
self.ensure_one()
appt_tz = pytz.timezone(self.appointment_tz)
requested_t | _slots_available | identifier_name |
calendar_appointment.py | ,
help="Timezone where appointment take place")
employee_ids = fields.Many2many('hr.employee', 'website_calendar_type_employee_rel', domain=[('user_id', '!=', False)], string='Employees')
assignation_method = fields.Selection([
('random', 'Random'),
('chosen', 'Chosen by the Customer')], string='Assignment Method', default='random',
help="How employees will be assigned to meetings customers book on your website.")
appointment_count = fields.Integer('# Appointments', compute='_compute_appointment_count')
def _compute_appointment_count(self):
meeting_data = self.env['calendar.event'].read_group([('appointment_type_id', 'in', self.ids)], ['appointment_type_id'], ['appointment_type_id'])
mapped_data = {m['appointment_type_id'][0]: m['appointment_type_id_count'] for m in meeting_data}
for appointment_type in self:
appointment_type.appointment_count = mapped_data.get(appointment_type.id, 0)
def _compute_website_url(self):
super(CalendarAppointmentType, self)._compute_website_url()
for appointment_type in self:
            if appointment_type.id:
appointment_type.website_url = '/calendar/%s/appointment' % (slug(appointment_type),)
@api.returns('self', lambda value: value.id)
def copy(self, default=None):
default = default or {}
default['name'] = self.name + _(' (copy)')
return super(CalendarAppointmentType, self).copy(default=default)
def action_calendar_meetings(self):
self.ensure_one()
action = self.env["ir.actions.actions"]._for_xml_id("calendar.action_calendar_event")
action['context'] = {
'default_appointment_type_id': self.id,
'search_default_appointment_type_id': self.id
}
return action
# --------------------------------------
# Slots Generation
# --------------------------------------
def _slots_generate(self, first_day, last_day, timezone):
""" Generate all appointment slots (in naive UTC, appointment timezone, and given (visitors) timezone)
between first_day and last_day (datetimes in appointment timezone)
:return: [ {'slot': slot_record, <timezone>: (date_start, date_end), ...},
... ]
"""
def append_slot(day, slot):
local_start = appt_tz.localize(datetime.combine(day, time(hour=int(slot.hour), minute=int(round((slot.hour % 1) * 60)))))
local_end = appt_tz.localize(
datetime.combine(day, time(hour=int(slot.hour), minute=int(round((slot.hour % 1) * 60)))) + relativedelta(hours=self.appointment_duration))
slots.append({
self.appointment_tz: (
local_start,
local_end,
),
timezone: (
local_start.astimezone(requested_tz),
local_end.astimezone(requested_tz),
),
'UTC': (
local_start.astimezone(pytz.UTC).replace(tzinfo=None),
local_end.astimezone(pytz.UTC).replace(tzinfo=None),
),
'slot': slot,
})
appt_tz = pytz.timezone(self.appointment_tz)
requested_tz = pytz.timezone(timezone)
slots = []
for slot in self.slot_ids.filtered(lambda x: int(x.weekday) == first_day.isoweekday()):
if slot.hour > first_day.hour + first_day.minute / 60.0:
append_slot(first_day.date(), slot)
slot_weekday = [int(weekday) - 1 for weekday in self.slot_ids.mapped('weekday')]
for day in rrule.rrule(rrule.DAILY,
dtstart=first_day.date() + timedelta(days=1),
until=last_day.date(),
byweekday=slot_weekday):
for slot in self.slot_ids.filtered(lambda x: int(x.weekday) == day.isoweekday()):
append_slot(day, slot)
return slots
def _slots_available(self, slots, first_day, last_day, employee=None):
""" Fills the slot stucture with an available employee
:param slots: slots structure generated by _slots_generate
:param first_day: start datetime in UTC
:param last_day: end datetime in UTC
:param employee: if set, only consider this employee
if not set, consider all employees assigned to this appointment type
"""
def is_work_available(start_dt, end_dt, intervals):
""" check if the slot is contained in the employee's work hours (defined by intervals)
"""
def find_start_index():
""" find the highest index of intervals for which the start_date (element [0]) is before (or at) start_dt
"""
def recursive_find_index(lower_bound, upper_bound):
if upper_bound - lower_bound <= 1:
if intervals[upper_bound][0] <= start_dt:
return upper_bound | index = (upper_bound + lower_bound) // 2
if intervals[index][0] <= start_dt:
return recursive_find_index(index, upper_bound)
else:
return recursive_find_index(lower_bound, index)
if start_dt <= intervals[0][0] - tolerance:
return -1
if end_dt >= intervals[-1][1] + tolerance:
return -1
return recursive_find_index(0, len(intervals) - 1)
if not intervals:
return False
tolerance = timedelta(minutes=1)
start_index = find_start_index()
if start_index != -1:
for index in range(start_index, len(intervals)):
if intervals[index][1] >= end_dt - tolerance:
return True
if len(intervals) == index + 1 or intervals[index + 1][0] - intervals[index][1] > tolerance:
return False
return False
def is_calendar_available(slot, events, employee):
""" Returns True if the given slot doesn't collide with given events for the employee
"""
start_dt = slot['UTC'][0]
end_dt = slot['UTC'][1]
event_in_scope = lambda ev: (
fields.Date.to_date(ev.start) <= fields.Date.to_date(end_dt)
and fields.Date.to_date(ev.stop) >= fields.Date.to_date(start_dt)
)
for ev in events.filtered(event_in_scope):
if ev.allday:
# allday events are considered to take the whole day in the related employee's timezone
event_tz = pytz.timezone(ev.event_tz or employee.user_id.tz or self.env.user.tz or slot['slot'].appointment_type_id.appointment_tz or 'UTC')
ev_start_dt = datetime.combine(fields.Date.from_string(ev.start_date), time.min)
ev_stop_dt = datetime.combine(fields.Date.from_string(ev.stop_date), time.max)
ev_start_dt = event_tz.localize(ev_start_dt).astimezone(pytz.UTC).replace(tzinfo=None)
ev_stop_dt = event_tz.localize(ev_stop_dt).astimezone(pytz.UTC).replace(tzinfo=None)
if ev_start_dt < end_dt and ev_stop_dt > start_dt:
return False
elif fields.Datetime.to_datetime(ev.start) < end_dt and fields.Datetime.to_datetime(ev.stop) > start_dt:
return False
return True
workhours = {}
meetings = {}
# With context will be used in resource.calendar to force the referential user
# for work interval computing to the *user linked to the employee*
available_employees = [emp.with_context(tz=emp.user_id.tz) for emp in (employee or self.employee_ids)]
random.shuffle(available_employees)
for slot in slots:
for emp_pos, emp in enumerate(available_employees):
if emp_pos not in workhours:
workhours[emp_pos] = [
(interval[0].astimezone(pytz.UTC).replace(tzinfo=None),
interval[1].astimezone(pytz.UTC).replace(tzinfo=None))
for interval in emp.resource_calendar_id._work_intervals_batch(
first_day, last_day, resources=emp.resource_id,
)[emp.resource_id.id]
]
if is_work_available(slot['UTC'][0], slot['UTC'][1], workhours[emp_pos]):
if emp_pos not in meetings:
# note: no check is made on the attendee's status (accepted/declined/...)
meetings[emp_pos] = self.env['calendar.event'].search([
('partner_ids.user_ids', '=', emp.user_id.id),
('start', '<', fields.Datetime.to_string(last_day.replace(hour=23, minute=59, second=59))),
('stop', '>', fields.Datetime.to_string(first_day.replace(hour=0, minute=0, second=0)))
])
if is_calendar_available(slot, meetings[emp_pos], emp):
slot['employee_id'] = emp
break
def _get_appointment_slots(self, timezone, employee=None):
""" Fetch available slots to book an appointment
:param timezone: timezone string e.g.: 'Europe/Brussels' or 'Etc/GMT+1'
:param employee: if set will only check available slots for this employee
:returns: list of dicts (1 per month) containing available slots per day per week.
complex structure used to simplify rendering of template
"""
self.ensure_one()
appt_tz = pytz.timezone(self.appointment_tz)
requested_tz = | return lower_bound | random_line_split |
graphs.py |
    class edge:
def __init__(self,x,y):
self.v1 = x
self.v2 = y
def add_edge(self,u,v):
#for directed graphs
self.graph[u].append(v)
#for undirected graphs
#self.graph[u].append(v)
#self.graph[v].append(u)
#e = edge(u,v)
#edges.append(e)
def add_egde_weight(self,u,v,w):
        self.graph[u].append((v,w)) #adds a tuple to u where v is the destination and w is the weight of the edge.
#Carry out a bfs from a source 's'
def bfs(self,s):
#initialising list to keep track of all visited nodes, as false
visited = [False]*(len(self.graph))
#Creating a queue and pushing the source in it.
queue = []
queue.append(s)
visited[s] = True
while queue:
e = queue.pop(0)
print(e)
            for i in self.graph[e]:
                if visited[i] == False:
                    queue.append(i)
                    visited[i] = True
#shortest single source path in unweighted graph: bfs
def bfs_shortest_path(self,s):
visited = [False]*(len(self.graph))
queue = []
distances = [-1]*(len(self.graph)) #distances of all vertices
queue.append(s)
visited[s] = True
distances[s] = 0 #set distance of the source as 0
while(queue):
            e = queue.pop(0) # pop from the front: FIFO order keeps this a BFS
            for i in self.graph[e]:
if visited[i] == False:
if distances[i] == -1:
distances[i] = distances[e] + 1 #accumulates the sum of distances from the source
queue.append(i)
visited[i] = True
#Carry out DFS from a source
def DFS(self,v):
visited = [False]*(len(self.graph))
self.DFS_util(v,visited)
def DFS_util(self,v, visited):
        visited[v] = True
print(v)
for i in self.graph[v]:
if visited[i] == False:
                self.DFS_util(i,visited)
# 1) Detect a cycle in the graph
#to detect a back edge, we can keep track of vertices currently in recursion stack of
#function for DFS traversal. If we reach a vertex that is already in the recursion stack,
#then there is a cycle in the tree. The edge that connects current vertex to
#the vertex in the recursion stack is back edge.
#We have used recStack[] array to keep track of vertices in the recursion stack.
def detect_cycle(self):
visited = [False]*len(self.graph)
rec_stack = [False]*len(self.graph) # keeps track of all vertices in the stack and helps detect cycle
for v in self.graph:
if visited[v] == False:
                if self.detect_cycle_util(v,visited,rec_stack) == True:
return True
return False
def detect_cycle_util(self,v,visited,rec_stack):
visited[v] = True
rec_stack[v] = True
for node in self.graph[v]:
if visited[node] == False:
                if self.detect_cycle_util(node,visited,rec_stack):
return True
elif rec_stack[node] == True:
return True
        rec_stack[v] = False
return False
# Disjoint sets to find cycle in undirected graph
# we need find parent and construct Union functions
#recursive check on the parent_array for vertices to find the root of the disjoint set
def find_parent(self,parent_array,vertex):
if parent_array[vertex] == -1:
return vertex
else:
            return self.find_parent(parent_array,parent_array[vertex])
#checks parents of both vertices and then points x to y to encompass them in one set
def union(self,parent_array,x,y):
x_set = self.find_parent(parent_array,x)
y_set = self.find_parent(parent_array,y)
parent_array[x_set] = y_set
#For each edge, make subsets using both the vertices of the edge.
#If both the vertices are in the same subset, a cycle is found.
    # Otherwise add the two vertices of the edge in the same subset by union and report that the edge doesn't form the cycle
def detect_cycle_unweighted(self):
parent = [-1]*(len(self.graph))
        for e in self.E:
x = self.find_parent(parent, e.v1)
y = self.find_parent(parent, e.v2)
if x == y:
return True #back edge detected as the parent and the child have the same parent
            self.union(parent,e.v1,e.v2) # else put them both in the same set since they constitute an edge
#Count number of forests in a graph:
#Approach :
#1. Apply DFS on every node.
#2. Increment count by one if every connected node is visited from one source.
#3. Again perform DFS traversal if some nodes yet not visited.
#4. Count will give the number of trees in forest.
def count_trees(self):
visited = [False]*len(self.graph)
res = 0 # keeps count of number of trees in the graph
for i in range(len(self.graph)):
if visited[i] == False:
                self.count_trees_util(i,visited)
res = res + 1 # everytime we complete dfs from 1 node we increment the count by 1
return res
    def count_trees_util(self,v,visited):
        visited[v] = True
for i in self.graph[v]:
if visited[i] == False:
                self.count_trees_util(i,visited)
# Problem : Given Teams A and B, is there a sequence of teams starting with A
#and ending with B such taht each team in the sequence has beaten the next team in the sequence.
# Solution, model this problem as a graph:
# teams = vertices; source = winning and sink is losing
#perform graph reachability eg dfs or bfs from A to B
#use collections.namedtuple('MatchResult',('winning_team','losing_team')) to create match results
# then create an array of these match results called matches
# use this array to construct a graph
MatchResult = collections.namedtuple('MatchResult',('winning_team','losing_team'))
def can_a_beat_b(matches,a,b):
def build_graph():
graph = collections.defaultdict(set)
for match in matches:
            graph[match.winning_team].add(match.losing_team) # adjacency is a set, so add() rather than append()
return graph
    graph = build_graph()
    def is_reachable_dfs(graph,curr,dest):
        visited = set()
        return is_reachable_dfs_util(curr,dest,visited)
    def is_reachable_dfs_util(curr,dest,visited):
        if curr == dest:
            return True
        visited.add(curr) # visited is a set: use add(), not item assignment
        for i in graph[curr]:
            if i not in visited:
                if is_reachable_dfs_util(i,dest,visited):
                    return True
        return False
    return is_reachable_dfs(graph,a,b)
#Single source shortest path of a DAG:
# 1. Initialise the dist[]: SHORTEST PATH -> INF, LONGEST PATH -> -INF and source -> 0 ALWAYS
# 2. Perform a topological sort on the graph
# 3. Process the vertices in topological order and for each vertex, relax its adjacent vertices with the weight of the edge
#
dist = [float("inf")]*len(graph)
dist[s] = 0
while(stack):
node = stack.pop()
for next,weight in graph[node]:
        dist[next] = min(dist[next], dist[node] + weight)
#For Single Source Longest Path:
dist = [float("-inf")]*len(graph)
dist[s] = 0
while(stack):
node = stack.pop()
for next,weight in graph[node]:
dist[next] = max(dist[next], dist[node] + weight)
# NOTE: since we start with the source and the distance of the source is never INF, due to topological sort dist[i] will never be INF
# Also, since the node holds its distance from the source, for every consecutive vertex we need to add the dist[i](cumulative sum of all edges from the source) + weight(of the edge)
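# A minimal sketch (assumed adjacency dict {u: [(v, w), ...]}; the helper name
# is illustrative, not from the original file) of the topological sort that
# produces the 'stack' consumed by the relaxation loops above:
def topological_sort(graph):
    visited = set()
    stack = []
    def visit(u):
        visited.add(u)
        for v, _w in graph.get(u, []):
            if v not in visited:
                visit(v)
        stack.append(u)  # pushed only after all descendants are done
    for u in list(graph):
        if u not in visited:
            visit(u)
    return stack  # pop() from the end yields vertices in topological order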
# Problem: Search a Maze
# Given a 2D array of black and white entries representing a maze with designated entrance
# and exit, find a path from the entrance to the exit. white = open spaces and black = walls
# Solution : model all the white elements as the vertices sorted according to their coordinates
# Perform a DFS from entrance to exit
# NOTE: we can use BFS but we would need to explicitly maintain a queue, so avoid it unless we want the shortest path
# the coordinates are the (i,j) of the element treating the top left corner as the (0,0)
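# A hedged sketch of the maze DFS described above (the cell encoding and the
# helper name are assumptions, not from the original text):
def search_maze(maze, start, exit_):
    WHITE, BLACK = 0, 1  # open space / wall
    path = []
    def dfs(cell):
        i, j = cell
        if not (0 <= i < len(maze) and 0 <= j < len(maze[0])) or maze[i][j] != WHITE:
            return False
        maze[i][j] = BLACK  # mark visited in place so cells are not revisited
        path.append(cell)
        if cell == exit_:
            return True
        if any(dfs(n) for n in ((i-1, j), (i+1, j), (i, j-1), (i, j+1))):
            return True
        path.pop()  # dead end: backtrack
        return False
    return path if dfs(start) else []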
#Union by Rank and FindParent by path compression done to get a worst case O(logn) implementation
#Naive implementations take O(n)
#parents list should maintain tuples (parent,rank) for each index(which is the vertex)
#parents = [(-1,0)] for _ in range(len(self.graph))] will initialse each vertex's parent as itself and rank as 0
def find_parent_pc(parents,v):
if parents | self.graph = defaultdict(list)
self.V = vertices
self.E = edges | identifier_body |
|
graphs.py | #Application of DFS
#1.Topological Sort -> Normal DFS with a stack that pushes the leaf node.
#2. Cycle in a directed graph
#3. Count number of forests in a graph
#4. Shortest Path in Directed Acyclic Graph using topological sort
# Applications of BFS
# 1) Shortest Path and Minimum Spanning Tree for unweighted graph/in a matrix
####################
# Q A group of two or more people wants to meet and minimize the total travel distance. You are given a 2D grid of values 0 or 1, where each 1 marks the home of someone in the group.
# The distance is calculated using Manhattan Distance, where distance(p1, p2) = |p2.x - p1.x| + |p2.y - p1.y|.
#For example, given three people living at (0,0), (0,4), and (2,2):
# 1 - 0 - 0 - 0 - 1
# | | | | |
# 0 - 0 - 0 - 0 - 0
# | | | | |
# 0 - 0 - 1 - 0 - 0
# The point (0,2) is an ideal meeting point, as the total travel distance of 2+2+2=6 is minimal. So return 6.
# Approach 1: Carry out BFS from every house(1) and add up the distances; then choose the (0) with least sum: O(n^2)
# Approach 2: Sorting (accepted)
# consider 1D problem, The point which is equidistant from either end of the row (that is the median) is the optimal point for meeting.
# We will treat the 2D problem as two independent 1D problems that is find median of the sorted row co-ordinates and sorted column co-ordinates
def minDistance1D(points, origin):
# points is array of ints since it is in 1D , co-ordinates
distance = 0
for point in points:
distance += abs(point - origin)
return distance
def minDistance2D(grid2D):
# we will use these to collect all the positions of 1s
rows = []
cols = []
for i in range(len(grid2D)):
for j in range(len(grid2D[0])):
if grid2D[i][j] == 1:
rows.append(i)
cols.append(j)
    # After collecting the x coordinates in rows (already sorted by construction)
    # and the y coordinates in cols (which still need sorting),
    # we take the median of each as the origin
    row = rows[len(rows)//2]
cols.sort()
    col = cols[len(cols)//2]
# the point they should meet on is the median of the rows and columns
meetPoint = (row, col)
dist = (minDistance1D(rows, row) + minDistance1D(cols, col))
return (dist, meetPoint)
#Q You are given a m x n 2D grid initialized with these three possible values.
# -1 - A wall or an obstacle.
# 0 - A gate.
# INF - Infinity means an empty room. We use the value 2^31 - 1 = 2147483647 to represent INF as you may assume that the distance to a gate is less than 2147483647.
# Fill each empty room with the distance to its nearest gate. If it is impossible to reach a gate, it should be filled with INF.
#
# For example, given the 2D grid:
# INF -1 0 INF
# INF INF INF -1
# INF -1 INF -1
# 0 -1 INF INF
# After running your function, the 2D grid should be:
# 3 -1 0 1
# 2 2 1 -1
# 1 -1 2 -1
# 0 -1 3 4
# Approach 1: Carry out a bfs from every empty room to the gate and report the minimum value of the gate O(n^2)
#Approach 2: Collect all the gates and simultaneously carry out a bfs from them. O(n)
# Assuming the 2D graph is called matrix
def wallsAndGates(matrix):
    directions = [(1,0), (-1,0), (0,1), (0,-1)] #the permitted directions we can move from a point (row(x-axis), col(y-axis))
visited = collections.defaultdict(bool)
EMPTY = float("INF")
GATE = 0
WALL = -1
bfs(matrix, directions, visited)
def bfs(matrix, directions, visited):
rows = len(matrix)
cols = len(matrix[0])
q = []
#Collect all the gates and put them into a queue
    for row in range(rows):
        for col in range(cols):
if matrix[row][col] == GATE:
q.append((row, col))
#loop to carry out bfs from gates
while len(q) != 0:
        e = q.pop(0) # pop from the front: FIFO order keeps this a BFS
x = e[0]
y = e[1]
visited[e] = True
for d in directions:
r = x + d[0]
c = y + d[1]
#if the traversing takes us to non-empty or out of bounds then try again with another direction
            if r < 0 or c < 0 or r >= rows or c >= cols or matrix[r][c] != EMPTY or visited[(r,c)]:
continue
#if we are within bounds and land on an empty space record the distance from the point from where we moved
matrix[r][c] = matrix[x][y] + 1
#add the empty space to the queue to continue the bfs
q.append((r,c))
# Q. Given a non-empty 2D array grid of 0's and 1's, an island is a group of 1's (representing land) connected 4-directionally (horizontal or vertical.)
# You may assume all four edges of the grid are surrounded by water.
# Count the number of distinct islands.
# An island is considered to be the same as another if they have the same shape,
# or have the same shape after rotation (90, 180, or 270 degrees only) or reflection (left/right direction or up/down direction).
#
# Example 1:
# 11000
# 10000
# 00001
# 00011
# Given the above grid map, return 1.
#
# Notice that:
# 11
# 1
# and
# 1
# 11
# are considered same island shapes. Because if we make a 180 degrees clockwise rotation on the first island, then two islands will have the same shapes.
# Example 2:
# 11100
# 10001
# 01001
# 01110
# Given the above grid map, return 2.
#
# Here are the two distinct islands:
# 111
# 1
# and
# 1
# 1
#
# Notice that:
# 111
# 1
# and
# 1
# 111
# are considered same island shapes. Because if we flip the first array in the up/down direction, then they have the same shapes.
# Approach : Canonical hash
# 1. carry out dfs in the matrix from 1s
# use complex numbers in python for easy transformations
# 8 possibble transformations for every point
# For each of 8 possible rotations and reflections of the shape, we will perform the transformation and then translate the shape
# so that the bottom-left-most coordinate is (0, 0). Afterwards, we will consider the canonical hash of the shape to be
# the maximum of these 8 intermediate hashes.
# Intuition
# We determine local coordinates for each island.
# Afterwards, we will rotate and reflect the coordinates about the origin and translate the shape so that the bottom-left-most coordinate is (0, 0). At the end, the smallest of these lists coordinates will be the canonical representation of the shape.
# Algorithm
# We feature two different implementations, but the core idea is the same. We start with the code from the previous problem, Number of Distinct Islands.
# For each of 8 possible rotations and reflections of the shape, we will perform the transformation and then translate the shape so that the bottom-left-most coordinate is (0, 0).
# Afterwards, we will consider the canonical hash of the shape to be the maximum of these 8 intermediate hashes.
# In Python, the motivation to use complex numbers is that rotation by 90 degrees is the same as multiplying by the imaginary unit, 1j.
# In Java, we manipulate the coordinates directly. The 8 rotations and reflections of each point are (x, y), (-x, y), (x, -y), (-x, -y), (y, x), (-y, x), (y, -x), (-y, -x)
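# A hedged sketch of the canonical form described above (the helper and
# argument names are illustrative; 'shape' is one island's cells as complex numbers):
def canonical(shape):
    def translate(pts):
        # shift so the bottom-left-most point sits at the origin,
        # then store comparable (x, y) tuples
        w = min(pts, key=lambda z: (z.real, z.imag))
        return tuple(sorted(((z - w).real, (z - w).imag) for z in pts))
    forms = []
    rot = 1 + 0j
    for _ in range(4):
        rotated = [z * rot for z in shape]   # rotation by 90 degrees == * 1j
        forms.append(translate(rotated))
        forms.append(translate([complex(z.real, -z.imag) for z in rotated]))  # reflection
        rot *= 1j
    return min(forms)   # any fixed tie-break over the 8 forms works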
def | numIslands | identifier_name |
|
graphs.py | . Perform a topological sort on the graph
# 3. Process the vertices in topological order and for each vertex, relax its adjacent vertices with the weight of the edge
#
dist = [float("inf")]*len(graph)
dist[s] = 0
while(stack):
node = stack.pop()
for next,weight in graph[node]:
        dist[next] = min(dist[next], dist[node] + weight)
#For Single Source Longest Path:
dist = [float("-inf")]*len(graph)
dist[s] = 0
while(stack):
node = stack.pop()
for next,weight in graph[node]:
dist[next] = max(dist[next], dist[node] + weight)
# NOTE: since we start with the source and the distance of the source is never INF, due to topological sort dist[i] will never be INF
# Also, since the node holds its distance from the source, for every consecutive vertex we need to add the dist[i](cumulative sum of all edges from the source) + weight(of the edge)
# Problem: Search a Maze
# Given a 2D array of black and white entries representing a maze with designated entrance
# and exit, find a path from the entrance to the exit. white = open spaces and black = walls
# Solution : model all the white elements as the vertices sorted according to their coordinates
# Perform a DFS from entrance to exit
# NOTE: we can use BFS but we would need to explicitly maintain a queue, so avoid it unless we want the shortest path
# the coordinates are the (i,j) of the element treating the top left corner as the (0,0)
#Union by Rank and FindParent by path compression done to get a worst case O(logn) implementation | #Naive implementations take O(n)
#parents list should maintain tuples (parent,rank) for each index(which is the vertex)
#parents = [[-1,0] for _ in range(len(self.graph))] will initialise each vertex's parent as itself and rank as 0
def find_parent_pc(parents,v):
if parents[v][0] == -1:
return v
    else:
        parents[v][0] = find_parent_pc(parents,parents[v][0])
        return parents[v][0] # return the set root found by path compression
#Here if the node is not its own parent then we recursiely find the parent of the set and then
#SET THE NODE"S PARENT TO WHAT WE FIND AFTER RECURSION
# eg parent of 3 is 2, parent of 2 is 1 and parent of 1 is 1 => set is represented by 1 but the set is 3->2->1
# we find the set representation of 3 as 1 and then set parent of 3 as 1 so that we have 3->1<-2
#Always attach the shorter sets to longer sets
def union_rank(parents,x,y):
p1 = find_parent_pc(parents,x)
p2 = find_parent_pc(parents,y)
if parents[p1][1] < parents[p2][1]:
parents[p1][0] = p2 #set parent of p1 to p2
elif parents[p1][1] > parents[p2][1]:
parents[p2][0] = p1
else: #if the ranks are same, set any one as parent and increase its rank
parents[p1][0] = p2 #set parent of p1 to p2
parents[p2][1] = parents[p2][1] + 1
#NOTE:
#1. For path compression and union by ranking each vertex needs to store (rank, parent) information with it:
# these have to be mutable as rank and parents will change , so do NOT use namedtuples as they are immutable objects
#2. to create a list of lists:
# l = [[]]*3 will create 3 references to the same list, hence on l[0].append(1) the result will be [[1],[1],[1]]
# Use l = [[] for _ in range(3)]
#3. list.sort() returns None as it sorts the original list in place:
# Use sorted(list) to assign it to a new or temporary array
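# A tiny self-check illustrating notes 2 and 3 above (illustrative helper,
# not part of the original file):
def _notes_demo():
    rows = [[]] * 3                 # three references to ONE list
    rows[0].append(1)
    assert rows == [[1], [1], [1]]
    rows = [[] for _ in range(3)]   # three distinct lists
    rows[0].append(1)
    assert rows == [[1], [], []]
    xs = [3, 1]
    assert xs.sort() is None and xs == [1, 3]  # in-place, returns None
    assert sorted((3, 1)) == [1, 3]            # new list, input untouched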
#Application of DFS
#1.Topological Sort -> Normal DFS with a stack that pushes the leaf node.
#2. Cycle in a directed graph
#3. Count number of forests in a graph
#4. Shortest Path in Directed Acyclic Graph using topological sort
# Applications of BFS
# 1) Shortest Path and Minimum Spanning Tree for unweighted graph/in a matrix
####################
# Q A group of two or more people wants to meet and minimize the total travel distance. You are given a 2D grid of values 0 or 1, where each 1 marks the home of someone in the group.
# The distance is calculated using Manhattan Distance, where distance(p1, p2) = |p2.x - p1.x| + |p2.y - p1.y|.
#For example, given three people living at (0,0), (0,4), and (2,2):
# 1 - 0 - 0 - 0 - 1
# | | | | |
# 0 - 0 - 0 - 0 - 0
# | | | | |
# 0 - 0 - 1 - 0 - 0
# The point (0,2) is an ideal meeting point, as the total travel distance of 2+2+2=6 is minimal. So return 6.
# Approach 1: Carry out BFS from every house(1) and add up the distances; then choose the (0) with least sum: O(n^2)
# Approach 2: Sorting (accepted)
# consider 1D problem, The point which is equidistant from either end of the row (that is the median) is the optimal point for meeting.
# We will treat the 2D problem as two independent 1D problems that is find median of the sorted row co-ordinates and sorted column co-ordinates
def minDistance1D(points, origin):
# points is array of ints since it is in 1D , co-ordinates
distance = 0
for point in points:
distance += abs(point - origin)
return distance
def minDistance2D(grid2D):
# we will use these to collect all the positions of 1s
rows = []
cols = []
for i in range(len(grid2D)):
for j in range(len(grid2D[0])):
if grid2D[i][j] == 1:
rows.append(i)
cols.append(j)
    # After collecting the x coordinates in rows (already sorted by construction)
    # and the y coordinates in cols (which still need sorting),
    # we take the median of each as the origin
    row = rows[len(rows)//2]
cols.sort()
    col = cols[len(cols)//2]
# the point they should meet on is the median of the rows and columns
meetPoint = (row, col)
dist = (minDistance1D(rows, row) + minDistance1D(cols, col))
return (dist, meetPoint)
#Q You are given a m x n 2D grid initialized with these three possible values.
# -1 - A wall or an obstacle.
# 0 - A gate.
# INF - Infinity means an empty room. We use the value 2^31 - 1 = 2147483647 to represent INF as you may assume that the distance to a gate is less than 2147483647.
# Fill each empty room with the distance to its nearest gate. If it is impossible to reach a gate, it should be filled with INF.
#
# For example, given the 2D grid:
# INF -1 0 INF
# INF INF INF -1
# INF -1 INF -1
# 0 -1 INF INF
# After running your function, the 2D grid should be:
# 3 -1 0 1
# 2 2 1 -1
# 1 -1 2 -1
# 0 -1 3 4
# Approach 1: Carry out a bfs from every empty room to the gate and report the minimum value of the gate O(n^2)
#Approach 2: Collect all the gates and simultaneously carry out a bfs from them. O(n)
# Assuming the 2D graph is called matrix
def wallsAndGates(matrix):
    directions = [(1,0), (-1,0), (0,1), (0,-1)] #the permitted directions we can move from a point (row(x-axis), col(y-axis))
visited = collections.defaultdict(bool)
EMPTY = float("INF")
GATE = 0
WALL = -1
bfs(matrix, directions, visited)
def bfs(matrix, directions, visited):
rows = len(matrix)
cols = len(matrix[0])
q = []
#Collect all the gates and put them into a queue
    for row in range(rows):
        for col in range(cols):
if matrix[row][col] == GATE:
q.append | random_line_split |
|
graphs.py | . Perform a topological sort on the graph
# 3. Process the vertices in topological order and for each vertex, relax its adjacent vertices with the weight of the edge
#
dist = [float("inf")]*len(graph)
dist[s] = 0
while(stack):
node = stack.pop()
for next,weight in graph[node]:
        dist[next] = min(dist[next], dist[node] + weight)
#For Single Source Longest Path:
dist = [float("-inf")]*len(graph)
dist[s] = 0
while(stack):
node = stack.pop()
for next,weight in graph[node]:
dist[next] = max(dist[next], dist[node] + weight)
# NOTE: since we start with the source and the distance of the source is never INF, due to topological sort dist[i] will never be INF
# Also, since the node holds its distance from the source, for every consecutive vertex we need to add the dist[i](cumulative sum of all edges from the source) + weight(of the edge)
# Problem: Search a Maze
# Given a 2D array of black and white entries representing a maze with designated entrance
# and exit, find a path from the entrance to the exit. white = open spaces and black = walls
# Solution : model all the white elements as the vertices sorted according to their coordinates
# Perform a DFS from entrance to exit
# NOTE: we can use BFS but we would need to explicitly maintain a queue, so avoid it unless we want the shortest path
# the coordinates are the (i,j) of the element treating the top left corner as the (0,0)
#Union by Rank and FindParent by path compression done to get a worst case O(logn) implementation
#Naive implementations take O(n)
#parents list should maintain tuples (parent,rank) for each index(which is the vertex)
#parents = [[-1,0] for _ in range(len(self.graph))] will initialise each vertex's parent as itself and rank as 0
def find_parent_pc(parents,v):
if parents[v][0] == -1:
return v
    else:
        parents[v][0] = find_parent_pc(parents,parents[v][0])
        return parents[v][0] # return the set root found by path compression
#Here if the node is not its own parent then we recursiely find the parent of the set and then
#SET THE NODE"S PARENT TO WHAT WE FIND AFTER RECURSION
# eg parent of 3 is 2, parent of 2 is 1 and parent of 1 is 1 => set is represented by 1 but the set is 3->2->1
# we find the set representation of 3 as 1 and then set parent of 3 as 1 so that we have 3->1<-2
#Always attach the shorter sets to longer sets
def union_rank(parents,x,y):
p1 = find_parent_pc(parents,x)
p2 = find_parent_pc(parents,y)
if parents[p1][1] < parents[p2][1]:
parents[p1][0] = p2 #set parent of p1 to p2
elif parents[p1][1] > parents[p2][1]:
parents[p2][0] = p1
else: #if the ranks are same, set any one as parent and increase its rank
parents[p1][0] = p2 #set parent of p1 to p2
parents[p2][1] = parents[p2][1] + 1
#NOTE:
#1. For path compression and union by ranking each vertex needs to store (rank, parent) information with it:
# these have to be mutable as rank and parents will change , so do NOT use namedtuples as they are immutable objects
#2. to create a list of lists:
# l = [[]]*3 will create 3 references to the same list, hence on l[0].append(1) the result will be [[1],[1],[1]]
# Use l = [[] for _ in range(3)]
#3. list.sort() returns None as it sorts the original list in place:
# Use sorted(list) to assign it to a new or temporary array
#Application of DFS
#1.Topological Sort -> Normal DFS with a stack that pushes the leaf node.
#2. Cycle in a directed graph
#3. Count number of forests in a graph
#4. Shortest Path in Directed Acyclic Graph using topological sort
# Applications of BFS
# 1) Shortest Path and Minimum Spanning Tree for unweighted graph/in a matrix
####################
# Q A group of two or more people wants to meet and minimize the total travel distance. You are given a 2D grid of values 0 or 1, where each 1 marks the home of someone in the group.
# The distance is calculated using Manhattan Distance, where distance(p1, p2) = |p2.x - p1.x| + |p2.y - p1.y|.
#For example, given three people living at (0,0), (0,4), and (2,2):
# 1 - 0 - 0 - 0 - 1
# | | | | |
# 0 - 0 - 0 - 0 - 0
# | | | | |
# 0 - 0 - 1 - 0 - 0
# The point (0,2) is an ideal meeting point, as the total travel distance of 2+2+2=6 is minimal. So return 6.
# Approach 1: Carry out BFS from every house(1) and add up the distances; then choose the (0) with least sum: O(n^2)
# Approach 2: Sorting (accepted)
# consider 1D problem, The point which is equidistant from either end of the row (that is the median) is the optimal point for meeting.
# We will treat the 2D problem as two independent 1D problems that is find median of the sorted row co-ordinates and sorted column co-ordinates
def minDistance1D(points, origin):
# points is array of ints since it is in 1D , co-ordinates
distance = 0
for point in points:
distance += abs(point - origin)
return distance
def minDistance2D(grid2D):
# we will use these to collect all the positions of 1s
rows = []
cols = []
for i in range(len(grid2D)):
|
    # After collecting the x coordinates in rows (already sorted by construction)
    # and the y coordinates in cols (which still need sorting),
    # we take the median of each as the origin
    row = rows[len(rows)//2]
cols.sort()
    col = cols[len(cols)//2]
# the point they should meet on is the median of the rows and columns
meetPoint = (row, col)
dist = (minDistance1D(rows, row) + minDistance1D(cols, col))
return (dist, meetPoint)
#Q You are given a m x n 2D grid initialized with these three possible values.
# -1 - A wall or an obstacle.
# 0 - A gate.
# INF - Infinity means an empty room. We use the value 2^31 - 1 = 2147483647 to represent INF as you may assume that the distance to a gate is less than 2147483647.
# Fill each empty room with the distance to its nearest gate. If it is impossible to reach a gate, it should be filled with INF.
#
# For example, given the 2D grid:
# INF -1 0 INF
# INF INF INF -1
# INF -1 INF -1
# 0 -1 INF INF
# After running your function, the 2D grid should be:
# 3 -1 0 1
# 2 2 1 -1
# 1 -1 2 -1
# 0 -1 3 4
# Approach 1: Carry out a bfs from every empty room to the gate and report the minimum value of the gate O(n^2)
#Approach 2: Collect all the gates and simultaneously carry out a bfs from them. O(n)
# Assuming the 2D graph is called matrix
def wallsAndGates(matrix):
    directions = [(1,0), (-1,0), (0,1), (0,-1)] #the permitted directions we can move from a point (row(x-axis), col(y-axis))
visited = collections.defaultdict(bool)
EMPTY = float("INF")
GATE = 0
WALL = -1
bfs(matrix, directions, visited)
def bfs(matrix, directions, visited):
rows = len(matrix)
cols = len(matrix[0])
q = []
#Collect all the gates and put them into a queue
    for row in range(rows):
        for col in range(cols):
if matrix[row][col] == GATE:
| for j in range(len(grid2D[0])):
if grid2D[i][j] == 1:
rows.append(i)
cols.append(j) | conditional_block |
restconf.py | Systems, Inc.
"""
import os
import re
import logging
import json
import subprocess
import datetime
import lxml.etree as et
from jinja2 import Template, Environment, FileSystemLoader
from collections import OrderedDict
from explorer.utils.admin import ModuleAdmin
def get_op(keyvalue, mode):
'''
Return option and path depth of option to use for URL.
URL should extend to where option is placed and message body
should contain everything beyond.
'''
for child in keyvalue:
op = child.attrib.get("option", "")
if mode in ["get", "get-config"]:
op = child.attrib.get('flag')
path = child.attrib.get("path")
path_len = len(path.split("/"))
if op == 'remove':
return ('delete', path_len-2)
elif op == 'replace':
return ('merge', path_len-2)
        elif op:
            # any other explicit option, including the 'get'/'get-config' flags
            return (op, path_len-2)
if not op:
if mode in ["get", "get-config"]:
return ('get', 0)
return ('merge', 0)
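# Hedged usage sketch (hypothetical XML, deliberately left commented out):
# kv = et.fromstring('<keyvalue>'
#                    '<node path="mod/ifs/if/name" option="replace"/>'
#                    '</keyvalue>')
# get_op(kv, 'edit-config')   # -> ('merge', 2): URL extends 2 segments deep,
#                             #    everything beyond goes into the message body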
def url_escape_chars(val):
'''
Some characters must be converted to work in a URL
'''
if not isinstance(val, (str, unicode)):
return val
return val.replace("/", "%2F").replace(":", "%3A").replace(" ", "%20")
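# e.g. url_escape_chars('GigabitEthernet0/0') -> 'GigabitEthernet0%2F0'
# and  url_escape_chars('ietf-interfaces:interfaces') -> 'ietf-interfaces%3Ainterfaces'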
def set_type(val):
'''
Using json.dumps() to convert dict to JSON strings requires that
actual Python values are correct in dict
TODO: Not sure if all correct datatypes are here. What about
typedefs? Can they be represented in string form?
'''
if not val.text:
return None
if val.datatype == 'string' or ':' in val.datatype:
return val.text
if val.datatype.startswith('int') or val.datatype.startswith('uint'):
return int(val.text)
return val.text
def add_container(seg, msg):
|
def add_list(seg, msg):
kdict = OrderedDict()
for key in seg.keys:
kdict[key.name] = set_type(key)
for leaf in seg.leaves:
kdict[leaf.name] = set_type(leaf)
return kdict
def build_msg(segs, msg=None):
    # default to a fresh dict per call; a mutable default argument would be
    # shared across calls and leak state between messages
    if msg is None:
        msg = OrderedDict()
    for seg in segs:
if seg.type == 'container':
cont = add_container(seg, msg)
msg[seg.name] = cont
build_msg(seg.segments, cont)
elif seg.type == 'list':
lst = add_list(seg, msg)
msg[seg.name] = [lst]
build_msg(seg.segments, lst)
else:
msg[seg.name] = set_type(seg)
return msg
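# Hedged sketch of the output shape (segment and leaf names are illustrative):
# a container 'interfaces' holding a list 'interface' keyed by 'name' with a
# leaf 'mtu' would serialize via json.dumps(build_msg(...)) roughly as
# {"interfaces": {"interface": [{"name": "eth0", "mtu": 1500}]}}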
class Segment(object):
'''
Utility class to make handling of lxml Element classes easier to deal with
'''
def __init__(self, seg, text=''):
self.name = seg.attrib.get('name')
self.type = seg.attrib.get('type')
self.datatype = seg.attrib.get('datatype')
self.presence = seg.attrib.get('presence')
self.text = text
self.leaves = []
self.segments = []
self.keys = []
self.depth = 0
def __eq__(self, x):
'''
Takes an lxml Element object based on cxml node tags and compares the
name attribute. Makes it easier to use "==" and "in" operators
'''
if hasattr(x, 'attrib'):
return self.name == x.attrib.get('name')
return False
def __str__(self):
return self.name
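# Hedged usage sketch (hypothetical cxml node, not part of the module):
# node = et.fromstring('<node name="mtu" type="leaf" datatype="uint16"/>')
# seg = Segment(node, text='1500')
# seg == node      # True -- __eq__ compares the cxml 'name' attribute
# set_type(seg)    # 1500 (int), per set_type() above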
def parse_url(username, request, mode):
'''
Main function that creates a URL and message body that uses the cxml (lxml)
Element nodes from a defined test from the YangExplorer GUI
Rules:
https://tools.ietf.org/html/draft-ietf-netconf-restconf-09
No option attribute defaults to PATCH operation with shortest possible URL.
Rest of data is put in message body in JSON format.
Option found in path denotes length of URL. Any data beyond option is put
into message body.
'''
keyvalue = request.find('keyvalue')
cxml = None
name = ''
tpath = []
master_segment = None
op, op_location = get_op(keyvalue, mode)
paths = []
url = None
pdict = {}
msg = {}
#pdb.set_trace()
for child in keyvalue:
path = child.get('path', '')
path = path.split("/")
if not cxml:
name = path[0]
url = [path[1]]
#if op not in ['delete', 'replace']:
# return (name, op, url)
filename = ModuleAdmin.cxml_path(username, path[0])
cxml = et.parse(filename)
paths.append((path, child))
prev_seg = False
for path, child in paths:
rt = cxml.getroot()
prev_list = False
depth = 0
for p in path:
depth += 1
chld = rt.getchildren()
for n in chld:
if n.attrib and n.attrib.get('name') == p:
if prev_list:
if n.attrib.get('is_key') == 'true':
if n not in prev_list.keys:
t = n.attrib.get('name')
index = prev_list.keys.index(t[t.find(':')+1:])
s = Segment(n)
s.text = child.text
prev_list.keys[index] = s
more = [f for f in prev_list.keys if not isinstance(f, Segment)]
if not more:
prev_list = False
rt = n
continue
if n.attrib.get('type') in ['leaf', 'leafref', 'leaf-list']:
if prev_seg:
prev_seg.leaves.append(Segment(n, child.text))
if n not in tpath:
tpath.append(Segment(n, child.text))
elif n.attrib.get('type') == 'list':
if n in tpath:
for t in tpath:
if n == t:
prev_list = t
else:
prev_list = Segment(n)
if not master_segment:
master_segment = prev_list
elif prev_seg:
prev_seg.segments.append(prev_list)
prev_list.depth = depth
tpath.append(prev_list)
prev_list.keys = n.attrib.get('key').split()
prev_seg = prev_list
rt = n
break
elif n.attrib.get('type') in ['container']:
if n in tpath:
for t in tpath:
if n == t:
prev_seg = t
else:
cont = Segment(n)
cont.depth = depth
if not master_segment:
master_segment = cont
elif prev_seg:
for i, t in enumerate(tpath):
if t.name == prev_seg.name and t.depth == depth-1:
t.segments.append(cont)
break
prev_seg = cont
tpath.append(prev_seg)
rt = n
break
elif n.attrib.get('type') in ['case', 'choice']:
depth -= 1
rt = n
break
if op not in ["get", "get-config", 'delete']:
msg = build_msg([tpath[op_location:][0]], OrderedDict())
if op_location:
url = []
for i, seg in enumerate(tpath):
if seg.type in ['leaf', 'leafref', 'leaf-list']:
if op in ["get", "get-config", 'delete']:
if len(tpath)-1 >= i:
continue
else:
continue
s = url_escape_chars(seg.name)
url.append(s)
if op not in ["get", "get-config", 'delete'] and i == op_location:
break
if seg.type == 'list':
keys = []
for key in seg.keys:
if key is None: break
                if isinstance(key, str): continue
keys.append(key.text)
if len(keys) > 0:
k = ','.join(keys)
k = url_escape_chars(k)
url.append(k)
return (name, op, '/'.join(url), json.dumps(msg, indent=2))
def gen_restconf(username, request, mode):
'''
Request from YangExplorer GUI is processed and URL, headers, and message
body is produced.
request - xml.etree.ElementTree root (should move to lxml)
mode - edit-config or get-config
'''
rpc_exec = []
target = request.attrib.get('target', 'running')
device_data = request.find('device-auth')
if device_data is not None:
platform = device_data.attrib.get('platform', 'IOS-XE')
if platform == 'csr': platform = 'IOS-XE'
else:
platform = 'IOS-XE'
name, op, path, data = parse_url(username, request, mode)
if platform == 'IOS-XE':
url = "/restconf/api/" + target + '/'+path
else:
url = "/restconf/data/"+ target + '/'+path
| cont = OrderedDict()
if seg.presence == 'true':
return cont
for leaf in seg.leaves:
cont[leaf.name] = set_type(leaf)
return cont | identifier_body |
restconf.py | Cisco Systems, Inc.
"""
import os
import re
import logging
import json
import subprocess
import datetime
import lxml.etree as et
from jinja2 import Template, Environment, FileSystemLoader
from collections import OrderedDict
from explorer.utils.admin import ModuleAdmin
def get_op(keyvalue, mode):
'''
Return option and path depth of option to use for URL.
URL should extend to where option is placed and message body
should contain everything beyond.
'''
for child in keyvalue:
op = child.attrib.get("option", "")
if mode in ["get", "get-config"]:
op = child.attrib.get('flag')
path = child.attrib.get("path")
path_len = len(path.split("/"))
if op == 'remove':
return ('delete', path_len-2)
elif op == 'replace':
return ('merge', path_len-2)
        elif op:
            # any other explicit option, including the 'get'/'get-config' flags
            return (op, path_len-2)
if not op:
if mode in ["get", "get-config"]:
return ('get', 0)
return ('merge', 0)
def url_escape_chars(val):
'''
Some characters must be converted to work in a URL | if not isinstance(val, (str, unicode)):
return val
return val.replace("/", "%2F").replace(":", "%3A").replace(" ", "%20")
def set_type(val):
'''
Using json.dumps() to convert dict to JSON strings requires that
actual Python values are correct in dict
TODO: Not sure if all correct datatypes are here. What about
typedefs? Can they be represented in string form?
'''
if not val.text:
return None
if val.datatype == 'string' or ':' in val.datatype:
return val.text
if val.datatype.startswith('int') or val.datatype.startswith('uint'):
return int(val.text)
return val.text
def add_container(seg, msg):
cont = OrderedDict()
if seg.presence == 'true':
return cont
for leaf in seg.leaves:
cont[leaf.name] = set_type(leaf)
return cont
def add_list(seg, msg):
kdict = OrderedDict()
for key in seg.keys:
kdict[key.name] = set_type(key)
for leaf in seg.leaves:
kdict[leaf.name] = set_type(leaf)
return kdict
def build_msg(segs, msg=None):
    # default to a fresh dict per call; a mutable default argument would be
    # shared across calls and leak state between messages
    if msg is None:
        msg = OrderedDict()
    for seg in segs:
if seg.type == 'container':
cont = add_container(seg, msg)
msg[seg.name] = cont
build_msg(seg.segments, cont)
elif seg.type == 'list':
lst = add_list(seg, msg)
msg[seg.name] = [lst]
build_msg(seg.segments, lst)
else:
msg[seg.name] = set_type(seg)
return msg
class Segment(object):
'''
Utility class to make handling of lxml Element classes easier to deal with
'''
def __init__(self, seg, text=''):
self.name = seg.attrib.get('name')
self.type = seg.attrib.get('type')
self.datatype = seg.attrib.get('datatype')
self.presence = seg.attrib.get('presence')
self.text = text
self.leaves = []
self.segments = []
self.keys = []
self.depth = 0
def __eq__(self, x):
'''
Takes an lxml Element object based on cxml node tags and compares the
name attribute. Makes it easier to use "==" and "in" operators
'''
if hasattr(x, 'attrib'):
return self.name == x.attrib.get('name')
return False
def __str__(self):
return self.name
def parse_url(username, request, mode):
'''
Main function that creates a URL and message body that uses the cxml (lxml)
Element nodes from a defined test from the YangExplorer GUI
Rules:
https://tools.ietf.org/html/draft-ietf-netconf-restconf-09
No option attribute defaults to PATCH operation with shortest possible URL.
Rest of data is put in message body in JSON format.
Option found in path denotes length of URL. Any data beyond option is put
into message body.
'''
keyvalue = request.find('keyvalue')
cxml = None
name = ''
tpath = []
master_segment = None
op, op_location = get_op(keyvalue, mode)
paths = []
url = None
pdict = {}
msg = {}
#pdb.set_trace()
for child in keyvalue:
path = child.get('path', '')
path = path.split("/")
if not cxml:
name = path[0]
url = [path[1]]
#if op not in ['delete', 'replace']:
# return (name, op, url)
filename = ModuleAdmin.cxml_path(username, path[0])
cxml = et.parse(filename)
paths.append((path, child))
prev_seg = False
for path, child in paths:
rt = cxml.getroot()
prev_list = False
depth = 0
for p in path:
depth += 1
chld = rt.getchildren()
for n in chld:
if n.attrib and n.attrib.get('name') == p:
if prev_list:
if n.attrib.get('is_key') == 'true':
if n not in prev_list.keys:
t = n.attrib.get('name')
index = prev_list.keys.index(t[t.find(':')+1:])
s = Segment(n)
s.text = child.text
prev_list.keys[index] = s
more = [f for f in prev_list.keys if not isinstance(f, Segment)]
if not more:
prev_list = False
rt = n
continue
if n.attrib.get('type') in ['leaf', 'leafref', 'leaf-list']:
if prev_seg:
prev_seg.leaves.append(Segment(n, child.text))
if n not in tpath:
tpath.append(Segment(n, child.text))
elif n.attrib.get('type') == 'list':
if n in tpath:
for t in tpath:
if n == t:
prev_list = t
else:
prev_list = Segment(n)
if not master_segment:
master_segment = prev_list
elif prev_seg:
prev_seg.segments.append(prev_list)
prev_list.depth = depth
tpath.append(prev_list)
prev_list.keys = n.attrib.get('key').split()
prev_seg = prev_list
rt = n
break
elif n.attrib.get('type') in ['container']:
if n in tpath:
for t in tpath:
if n == t:
prev_seg = t
else:
cont = Segment(n)
cont.depth = depth
if not master_segment:
master_segment = cont
elif prev_seg:
for i, t in enumerate(tpath):
if t.name == prev_seg.name and t.depth == depth-1:
t.segments.append(cont)
break
prev_seg = cont
tpath.append(prev_seg)
rt = n
break
elif n.attrib.get('type') in ['case', 'choice']:
depth -= 1
rt = n
break
if op not in ["get", "get-config", 'delete']:
msg = build_msg([tpath[op_location:][0]], OrderedDict())
if op_location:
url = []
for i, seg in enumerate(tpath):
if seg.type in ['leaf', 'leafref', 'leaf-list']:
if op in ["get", "get-config", 'delete']:
if len(tpath)-1 >= i:
continue
else:
continue
s = url_escape_chars(seg.name)
url.append(s)
if op not in ["get", "get-config", 'delete'] and i == op_location:
break
if seg.type == 'list':
keys = []
for key in seg.keys:
if key is None: break
if isinstance(key, str): continue;
keys.append(key.text)
if len(keys) > 0:
k = ','.join(keys)
k = url_escape_chars(k)
url.append(k)
return (name, op, '/'.join(url), json.dumps(msg, indent=2))
def gen_restconf(username, request, mode):
'''
Request from YangExplorer GUI is processed and URL, headers, and message
body is produced.
request - xml.etree.ElementTree root (should move to lxml)
mode - edit-config or get-config
'''
rpc_exec = []
target = request.attrib.get('target', 'running')
device_data = request.find('device-auth')
if device_data is not None:
platform = device_data.attrib.get('platform', 'IOS-XE')
if platform == 'csr': platform = 'IOS-XE'
else:
platform = 'IOS-XE'
name, op, path, data = parse_url(username, request, mode)
if platform == 'IOS-XE':
url = "/restconf/api/" + target + '/'+path
else:
url = "/restconf/data/"+ target + '/'+path
| ''' | random_line_split |
restconf.py | Systems, Inc.
"""
import os
import re
import logging
import json
import subprocess
import datetime
import lxml.etree as et
from jinja2 import Template, Environment, FileSystemLoader
from collections import OrderedDict
from explorer.utils.admin import ModuleAdmin
def | (keyvalue, mode):
'''
Return option and path depth of option to use for URL.
URL should extend to where option is placed and message body
should contain everything beyond.
'''
for child in keyvalue:
op = child.attrib.get("option", "")
if mode in ["get", "get-config"]:
op = child.attrib.get('flag')
path = child.attrib.get("path")
path_len = len(path.split("/"))
if op == 'remove':
return ('delete', path_len-2)
elif op == 'replace':
return ('merge', path_len-2)
        elif op:
            # any other explicit option, including the 'get'/'get-config' flags
            return (op, path_len-2)
if not op:
if mode in ["get", "get-config"]:
return ('get', 0)
return ('merge', 0)
def url_escape_chars(val):
'''
Some characters must be converted to work in a URL
'''
if not isinstance(val, (str, unicode)):
return val
return val.replace("/", "%2F").replace(":", "%3A").replace(" ", "%20")
def set_type(val):
'''
Using json.dumps() to convert dict to JSON strings requires that
actual Python values are correct in dict
TODO: Not sure if all correct datatypes are here. What about
typedefs? Can they be represented in string form?
'''
if not val.text:
return None
if val.datatype == 'string' or ':' in val.datatype:
return val.text
if val.datatype.startswith('int') or val.datatype.startswith('uint'):
return int(val.text)
return val.text
def add_container(seg, msg):
cont = OrderedDict()
if seg.presence == 'true':
return cont
for leaf in seg.leaves:
cont[leaf.name] = set_type(leaf)
return cont
def add_list(seg, msg):
kdict = OrderedDict()
for key in seg.keys:
kdict[key.name] = set_type(key)
for leaf in seg.leaves:
kdict[leaf.name] = set_type(leaf)
return kdict
def build_msg(segs, msg=None):
    # default to a fresh dict per call; a mutable default argument would be
    # shared across calls and leak state between messages
    if msg is None:
        msg = OrderedDict()
    for seg in segs:
if seg.type == 'container':
cont = add_container(seg, msg)
msg[seg.name] = cont
build_msg(seg.segments, cont)
elif seg.type == 'list':
lst = add_list(seg, msg)
msg[seg.name] = [lst]
build_msg(seg.segments, lst)
else:
msg[seg.name] = set_type(seg)
return msg
class Segment(object):
'''
Utility class to make handling of lxml Element classes easier to deal with
'''
def __init__(self, seg, text=''):
self.name = seg.attrib.get('name')
self.type = seg.attrib.get('type')
self.datatype = seg.attrib.get('datatype')
self.presence = seg.attrib.get('presence')
self.text = text
self.leaves = []
self.segments = []
self.keys = []
self.depth = 0
def __eq__(self, x):
'''
Takes an lxml Element object based on cxml node tags and compares the
name attribute. Makes it easier to use "==" and "in" operators
'''
if hasattr(x, 'attrib'):
return self.name == x.attrib.get('name')
return False
def __str__(self):
return self.name
def parse_url(username, request, mode):
'''
Main function that creates a URL and message body that uses the cxml (lxml)
Element nodes from a defined test from the YangExplorer GUI
Rules:
https://tools.ietf.org/html/draft-ietf-netconf-restconf-09
No option attribute defaults to PATCH operation with shortest possible URL.
Rest of data is put in message body in JSON format.
Option found in path denotes length of URL. Any data beyond option is put
into message body.
'''
keyvalue = request.find('keyvalue')
cxml = None
name = ''
tpath = []
master_segment = None
op, op_location = get_op(keyvalue, mode)
paths = []
url = None
pdict = {}
msg = {}
#pdb.set_trace()
for child in keyvalue:
path = child.get('path', '')
path = path.split("/")
if not cxml:
name = path[0]
url = [path[1]]
#if op not in ['delete', 'replace']:
# return (name, op, url)
filename = ModuleAdmin.cxml_path(username, path[0])
cxml = et.parse(filename)
paths.append((path, child))
prev_seg = False
for path, child in paths:
rt = cxml.getroot()
prev_list = False
depth = 0
for p in path:
depth += 1
chld = rt.getchildren()
for n in chld:
if n.attrib and n.attrib.get('name') == p:
if prev_list:
if n.attrib.get('is_key') == 'true':
if n not in prev_list.keys:
t = n.attrib.get('name')
index = prev_list.keys.index(t[t.find(':')+1:])
s = Segment(n)
s.text = child.text
prev_list.keys[index] = s
more = [f for f in prev_list.keys if not isinstance(f, Segment)]
if not more:
prev_list = False
rt = n
continue
if n.attrib.get('type') in ['leaf', 'leafref', 'leaf-list']:
if prev_seg:
prev_seg.leaves.append(Segment(n, child.text))
if n not in tpath:
tpath.append(Segment(n, child.text))
elif n.attrib.get('type') == 'list':
if n in tpath:
for t in tpath:
if n == t:
prev_list = t
else:
prev_list = Segment(n)
if not master_segment:
master_segment = prev_list
elif prev_seg:
prev_seg.segments.append(prev_list)
prev_list.depth = depth
tpath.append(prev_list)
prev_list.keys = n.attrib.get('key').split()
prev_seg = prev_list
rt = n
break
elif n.attrib.get('type') in ['container']:
if n in tpath:
for t in tpath:
if n == t:
prev_seg = t
else:
cont = Segment(n)
cont.depth = depth
if not master_segment:
master_segment = cont
elif prev_seg:
for i, t in enumerate(tpath):
if t.name == prev_seg.name and t.depth == depth-1:
t.segments.append(cont)
break
prev_seg = cont
tpath.append(prev_seg)
rt = n
break
elif n.attrib.get('type') in ['case', 'choice']:
depth -= 1
rt = n
break
if op not in ["get", "get-config", 'delete']:
msg = build_msg([tpath[op_location:][0]], OrderedDict())
if op_location:
url = []
for i, seg in enumerate(tpath):
if seg.type in ['leaf', 'leafref', 'leaf-list']:
if op in ["get", "get-config", 'delete']:
if len(tpath)-1 >= i:
continue
else:
continue
s = url_escape_chars(seg.name)
url.append(s)
if op not in ["get", "get-config", 'delete'] and i == op_location:
break
if seg.type == 'list':
keys = []
for key in seg.keys:
if key is None: break
if isinstance(key, str): continue;
keys.append(key.text)
if len(keys) > 0:
k = ','.join(keys)
k = url_escape_chars(k)
url.append(k)
return (name, op, '/'.join(url), json.dumps(msg, indent=2))
def gen_restconf(username, request, mode):
'''
Request from YangExplorer GUI is processed and URL, headers, and message
body is produced.
request - xml.etree.ElementTree root (should move to lxml)
mode - edit-config or get-config
'''
rpc_exec = []
target = request.attrib.get('target', 'running')
device_data = request.find('device-auth')
if device_data is not None:
platform = device_data.attrib.get('platform', 'IOS-XE')
if platform == 'csr': platform = 'IOS-XE'
else:
platform = 'IOS-XE'
name, op, path, data = parse_url(username, request, mode)
if platform == 'IOS-XE':
url = "/restconf/api/" + target + '/'+path
else:
url = "/restconf/data/"+ target + '/'+path
| get_op | identifier_name |
# restconf.py
"""
... Systems, Inc.
"""
import os
import re
import logging
import json
import subprocess
import datetime
import lxml.etree as et
from jinja2 import Template, Environment, FileSystemLoader
from collections import OrderedDict
from explorer.utils.admin import ModuleAdmin
def get_op(keyvalue, mode):
'''
Return option and path depth of option to use for URL.
URL should extend to where option is placed and message body
should contain everything beyond.
'''
for child in keyvalue:
op = child.attrib.get("option", "")
if mode in ["get", "get-config"]:
op = child.attrib.get('flag')
path = child.attrib.get("path")
path_len = len(path.split("/"))
if op == 'remove':
return ('delete', path_len-2)
elif op == 'replace':
return ('merge', path_len-2)
elif op:
return (op, path_len-2)
elif op in ["get", "get-config"]:
return (op, path_len-2)
if not op:
if mode in ["get", "get-config"]:
return ('get', 0)
return ('merge', 0)
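# Illustration (hypothetical input): a keyvalue child carrying option="replace"
# at path "module/native/interface/name" has a path depth of 4, so get_op
# returns ('merge', 2) -- the URL is built two segments deep and everything
# below that point moves into the JSON message body. Note that the branch
# testing `op in ["get", "get-config"]` above is shadowed by the preceding
# `elif op:`, which already returns the same (op, path_len-2) tuple.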
def url_escape_chars(val):
'''
Some characters must be converted to work in a URL
'''
if not isinstance(val, (str, unicode)):
return val
return val.replace("/", "%2F").replace(":", "%3A").replace(" ", "%20")
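# Example: url_escape_chars('GigabitEthernet0/0/1') -> 'GigabitEthernet0%2F0%2F1'.
# Interface names are the common case where '/', ':' or spaces would otherwise
# break the generated RESTCONF path.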
def set_type(val):
'''
Using json.dumps() to convert dict to JSON strings requires that
actual Python values are correct in dict
TODO: Not sure if all correct datatypes are here. What about
typedefs? Can they be represented in string form?
'''
if not val.text:
return None
if val.datatype == 'string' or ':' in val.datatype:
return val.text
if val.datatype.startswith('int') or val.datatype.startswith('uint'):
return int(val.text)
return val.text
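# Examples: a Segment with datatype 'uint32' and text '42' yields the integer
# 42, while datatype 'string' -- or any prefixed type such as
# 'inet:ipv4-address', matched by the ':' test -- passes the text through
# unchanged.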
def add_container(seg, msg):
cont = OrderedDict()
if seg.presence == 'true':
return cont
for leaf in seg.leaves:
cont[leaf.name] = set_type(leaf)
return cont
def add_list(seg, msg):
kdict = OrderedDict()
for key in seg.keys:
kdict[key.name] = set_type(key)
for leaf in seg.leaves:
kdict[leaf.name] = set_type(leaf)
return kdict
def build_msg(segs, msg=OrderedDict()):
for seg in segs:
if seg.type == 'container':
cont = add_container(seg, msg)
msg[seg.name] = cont
build_msg(seg.segments, cont)
elif seg.type == 'list':
lst = add_list(seg, msg)
msg[seg.name] = [lst]
build_msg(seg.segments, lst)
else:
msg[seg.name] = set_type(seg)
return msg
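# Shape sketch for container -> list -> leaf (names hypothetical):
# build_msg([ifaces], OrderedDict()) ==
#   OrderedDict([('interfaces',
#       OrderedDict([('interface', [OrderedDict([('name', 'eth0')])])]))])
# Note that msg=OrderedDict() is a shared mutable default; the call site in
# parse_url() passes a fresh OrderedDict() explicitly, which sidesteps the
# usual default-argument pitfall.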
class Segment(object):
'''
Utility class to make handling of lxml Element classes easier to deal with
'''
def __init__(self, seg, text=''):
self.name = seg.attrib.get('name')
self.type = seg.attrib.get('type')
self.datatype = seg.attrib.get('datatype')
self.presence = seg.attrib.get('presence')
self.text = text
self.leaves = []
self.segments = []
self.keys = []
self.depth = 0
def __eq__(self, x):
'''
Takes an lxml Element object based on cxml node tags and compares the
name attribute. Makes it easier to use "==" and "in" operators
'''
if hasattr(x, 'attrib'):
            return self.name == x.attrib.get('name')
return False
def __str__(self):
return self.name
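# Because __eq__ compares only the cxml 'name' attribute, expressions such as
# `n in tpath` in parse_url() match raw lxml Elements against stored Segment
# objects. No matching __hash__ is defined, so Segments are only safe to keep
# in lists, as this module does -- not in sets or as dict keys.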
def parse_url(username, request, mode):
'''
Main function that creates a URL and message body that uses the cxml (lxml)
Element nodes from a defined test from the YangExplorer GUI
Rules:
https://tools.ietf.org/html/draft-ietf-netconf-restconf-09
No option attribute defaults to PATCH operation with shortest possible URL.
Rest of data is put in message body in JSON format.
Option found in path denotes length of URL. Any data beyond option is put
into message body.
'''
keyvalue = request.find('keyvalue')
cxml = None
name = ''
tpath = []
master_segment = None
op, op_location = get_op(keyvalue, mode)
paths = []
url = None
pdict = {}
msg = {}
#pdb.set_trace()
for child in keyvalue:
path = child.get('path', '')
path = path.split("/")
if not cxml:
name = path[0]
url = [path[1]]
#if op not in ['delete', 'replace']:
# return (name, op, url)
filename = ModuleAdmin.cxml_path(username, path[0])
cxml = et.parse(filename)
paths.append((path, child))
prev_seg = False
for path, child in paths:
rt = cxml.getroot()
prev_list = False
depth = 0
for p in path:
depth += 1
chld = rt.getchildren()
for n in chld:
if n.attrib and n.attrib.get('name') == p:
if prev_list:
if n.attrib.get('is_key') == 'true':
if n not in prev_list.keys:
t = n.attrib.get('name')
index = prev_list.keys.index(t[t.find(':')+1:])
s = Segment(n)
s.text = child.text
prev_list.keys[index] = s
more = [f for f in prev_list.keys if not isinstance(f, Segment)]
if not more:
prev_list = False
rt = n
continue
if n.attrib.get('type') in ['leaf', 'leafref', 'leaf-list']:
if prev_seg:
prev_seg.leaves.append(Segment(n, child.text))
if n not in tpath:
tpath.append(Segment(n, child.text))
elif n.attrib.get('type') == 'list':
if n in tpath:
for t in tpath:
if n == t:
prev_list = t
else:
prev_list = Segment(n)
if not master_segment:
master_segment = prev_list
elif prev_seg:
prev_seg.segments.append(prev_list)
prev_list.depth = depth
tpath.append(prev_list)
prev_list.keys = n.attrib.get('key').split()
prev_seg = prev_list
rt = n
break
elif n.attrib.get('type') in ['container']:
if n in tpath:
for t in tpath:
if n == t:
prev_seg = t
else:
cont = Segment(n)
cont.depth = depth
if not master_segment:
master_segment = cont
elif prev_seg:
for i, t in enumerate(tpath):
if t.name == prev_seg.name and t.depth == depth-1:
t.segments.append(cont)
break
prev_seg = cont
tpath.append(prev_seg)
rt = n
break
elif n.attrib.get('type') in ['case', 'choice']:
depth -= 1
rt = n
break
if op not in ["get", "get-config", 'delete']:
msg = build_msg([tpath[op_location:][0]], OrderedDict())
if op_location:
url = []
for i, seg in enumerate(tpath):
if seg.type in ['leaf', 'leafref', 'leaf-list']:
if op in ["get", "get-config", 'delete']:
if len(tpath)-1 >= i:
continue
else:
continue
s = url_escape_chars(seg.name)
url.append(s)
if op not in ["get", "get-config", 'delete'] and i == op_location:
break
if seg.type == 'list':
keys = []
for key in seg.keys:
if key is None: break
                if isinstance(key, str): continue
keys.append(key.text)
if len(keys) > 0:
k = ','.join(keys)
k = url_escape_chars(k)
url.append(k)
return (name, op, '/'.join(url), json.dumps(msg, indent=2))
def gen_restconf(username, request, mode):
'''
Request from YangExplorer GUI is processed and URL, headers, and message
body is produced.
request - xml.etree.ElementTree root (should move to lxml)
mode - edit-config or get-config
'''
rpc_exec = []
target = request.attrib.get('target', 'running')
device_data = request.find('device-auth')
if device_data is not None:
platform = device_data.attrib.get('platform', 'IOS-XE')
if platform == 'csr': platform = 'IOS-XE'
else:
platform = 'IOS-XE'
name, op, path, data = parse_url(username, request, mode)
if platform == 'IOS-XE':
url = "/restconf/api/" + target + '/'+path
else:
url = "/restconf/data/"+ target + '/'+path
# invsim.py
# ...
    def simple_demand_step(self):
d = self.d_gen()
return {'demand' : d}
def all_or_nothing_supply_step(self):
old = {k : get_largest_key_val(self.logs[k])[1]
for k in self.logs.keys()}
if old['inventory'] >= old['demand']:
supply = old['demand']
new_inv = old['inventory'] - old['demand']
shortage = 0
excess = old['inventory'] - old['demand']
else:
supply = 0
new_inv = old['inventory']
shortage = old['demand'] - old['inventory']
excess = old['inventory']
new_csupply = old['reward_'] + supply
new_cshortage = old['missed_reward'] + shortage
new_cexcess = old['extra_inventory'] + excess
return {'supply' : supply, 'inventory' : new_inv, 'shortage' : shortage,
'reward_' : new_csupply, 'missed_reward' : new_cshortage,
'extra_inventory' : new_cexcess}
def partial_order_supply_step(self):
old = {k : get_largest_key_val(self.logs[k])[1]
for k in self.logs.keys()}
if old['inventory'] >= old['demand']:
supply = old['demand']
new_inv = old['inventory'] - old['demand']
shortage = 0
excess = old['inventory'] - old['demand']
else:
supply = old['inventory']
new_inv = 0
shortage = old['demand'] - old['inventory']
excess = 0
new_csupply = old['reward_'] + supply
new_cshortage = old['missed_reward'] + shortage
new_cexcess = old['extra_inventory'] + excess
return {'supply' : supply, 'inventory' : new_inv, 'shortage' : shortage,
'reward_' : new_csupply, 'missed_reward' : new_cshortage,
'extra_inventory' : new_cexcess}
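    # Example: with inventory 5 and demand 8 this step ships the 5 on hand
    # (shortage 3, inventory 0), whereas all_or_nothing_supply_step above
    # ships nothing, keeps the 5 units in stock, and still records the
    # shortfall of 3.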
def constant_inv_production_step(self):
old = {k : get_largest_key_val(self.logs[k])[1]
for k in self.logs.keys()}
current_inv = old['inventory']
if current_inv >= self.I0:
new_inv, production = current_inv, 0
else:
new_inv, production = self.I0, self.I0 - current_inv
return {'inventory' : new_inv, 'production' : production}
def bulk_production_step(self, delay):
def func():
old = {k : get_largest_key_val(self.logs[k])[1]
for k in self.logs.keys()}
current_inv = old['inventory']
if int(self.timer)%delay==0:
if current_inv >= self.I0:
new_inv, production = current_inv, 0
else:
new_inv, production = self.I0, self.I0 - current_inv
return {'inventory' : new_inv, 'production' : production}
else:
return {'inventory' : current_inv, 'production' : 0}
return func
def ROP_production_step(self, ROP, ROO, delay=1):
def func():
old = {k : get_largest_key_val(self.logs[k])[1]
for k in self.logs.keys()}
current_inv = old['inventory']
if int(self.timer)%delay==0:
if current_inv < ROP:
new_inv, production = current_inv + ROO, ROO
else:
new_inv, production = current_inv, 0
return {'inventory' : new_inv, 'production' : production}
else:
return {'inventory' : current_inv, 'production' : 0}
return func
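    # Example: ROP_production_step(ROP=10, ROO=50, delay=7) yields a step that
    # only acts when self.timer is a multiple of 7; if inventory has dropped
    # below 10 at that tick it reorders exactly 50 units, otherwise nothing.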
####################### helper functions #######################
def update_logs(self, results, time):
for k, v in results.items():
if k in self.logs.keys():
if is_number(v):
self.logs[k][time] = v
else:
print('cannot add:',v,'to',k,'logs')
else:
print('invalid log key:',k)
def maxtime(self):
mt = 0
for k, v in self.logs.items():
times = list(v.keys())
times = times if len(times) != 0 else [0]
mt = mt if mt > max(times) else max(times)
return int(np.ceil(mt))
def parse_steps(self, steps, kwargs):
default_steps = { 'd_step' : self.simple_demand_step,
's_step' : self.partial_order_supply_step,
'p_step' : self.constant_inv_production_step,
'strategy' : 'simple strategy'}
if isinstance(steps, dict) and set(steps.keys()) == set(default_steps.keys()):
return steps
elif steps is None:
strategy = kwargs.pop('strategy', None)
if strategy is None:
raise ValueError("exiting parse_steps() : no strategy or steps given")
elif isinstance(strategy, str):
strategy = ' '.join(strategy.split('_'))
if strategy == 'simple strategy':
self.I0 = kwargs.pop('I0', self.I0)
steps = default_steps
if strategy == 'bulk order strategy':
steps = default_steps
steps['strategy'] = strategy
delay = kwargs.pop('delay', 5)
steps['p_step'] = self.bulk_production_step(delay)
if strategy == 'ROP strategy':
steps = default_steps
ROP = kwargs.pop('ROP', int(self.I0))
ROO = kwargs.pop('ROO', 5*self.I0)
self.ROP, self.ROO = ROP, ROO
steps['strategy'] = strategy
delay = kwargs.pop('delay', 1)
steps['p_step'] = self.ROP_production_step(ROP, ROO, delay)
return steps
else:
raise ValueError("exiting parse_steps() : invalid argument steps")
def get_strategy_desc(self):
try:
strat = self.strategy
self.strategy_desc = ' '.join(strat.split('_'))
if 'ROP' in self.strategy: self.strategy_desc = r'ROP$(R_0={},R={})$'.format(self.ROP, self.ROO)#+ r', $I_0={}$'.format(self.I0)
self.strategy_foldername = '_'.join(strat.split(' '))
except:
raise Error('No strategy defined')
###################### time delay ##############################
def delay(self, delay_type=None, kwargs={}):
txt = 'setting delay type as: '
if delay_type == None:
#print(txt+'0')
return lambda : 0
elif isinstance(delay_type, str):
delay_type = delay_type.replace(' ', '_')
if delay_type == 'log_normal':
mu = kwargs.pop('mu', np.log10(500))
sigma = kwargs.pop('sigma', 5)
def func2():
t = np.random.lognormal(mu, sigma)
y2 = 60
t = t if t < y2 else y2 + np.random.randint(-10, 20)
return int(t)
#print(txt+'log normal, mu={}, sigma={}'.format(mu, sigma))
return func2
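    # Note: the log-normal draw is capped -- any value past 60 is replaced by
    # 60 + randint(-10, 20) -- so the heavy tail cannot stall the simulation
    # indefinitely. Unrecognised delay_type strings fall through and return
    # None, so callers must pass 'log_normal' or nothing at all.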
###################### plotting functions ######################
def plot_timeseries(self, showplot = True, savefig = False, **kwargs):
results_dict = {k : v for k, v in self.logs.items() if v != {}}
# get kwargs
maxtime = kwargs.pop('maxtime', None) or max(list(self.logs['demand'].keys()))
cm = kwargs.pop('cm', plt.cm.gnuplot)
figsize = kwargs.pop('figsize', (14, 12))
scatterkeys = kwargs.pop('scatterkeys', list(results_dict.keys()))
y_lims = kwargs.pop('y_lims', {}).copy()
plotorder = kwargs.pop('plotkeys', [['demand'], ['inventory'],
['supply'], ['production'],
['shortage'], ['reward_', 'missed_reward', 'extra_inventory']])
# get cmap
col = iter(cm(i/len(results_dict)) for i in range(len(results_dict)))
# setup plots
plt.close('all')
fig, axs = plt.subplots(int(np.ceil(len(plotorder)/2)), 2, sharex='col', figsize = figsize)
axs = iter(j for i in axs for j in i)
allmax = {ind : max(vals.values()) for ind, vals in results_dict.items()}
check_lims = {k : False for k in results_dict}
for plotlist in plotorder:
ax = next(axs)
for k in plotlist:
v = results_dict[k]
to_scatter, c = k in scatterkeys, next(col)
single_timeseries(ax, v, k, to_scatter, col=c, maxtime=maxtime)
if k == 'inventory' and 'ROP' in self.strategy:
ax.axhline(self.ROP, c='grey', alpha=0.7, label=r'$R_0$')
ax.legend(loc='upper left', fontsize=21)
                if y_lims and isinstance(y_lims, dict):
                    check_lims[k] = y_lims.pop(k, False)
                if check_lims[k]:
                    ax.set_ylim(check_lims[k])
            #ax.set_xlabel('time')
            ax.set_ylim(ymin=0)
            ax.set_xlim((0, maxtime))
            # ...
            fig.savefig(savepath)
print('\t\tsaved', savepath)
#################### plot 1 thing on an axis #####################################
def single_histogram(ax, data_dict, label, to_remove = {}, logscale = False, **kwargs):
col = kwargs.pop('col', 'k')
bins = kwargs.pop('bins', 50)
figsize = kwargs.pop('figsize', (10, 5))
annotate = kwargs.pop('annotate', False)
if ax is None:
plt.figure(figsize = figsize)
ax = plt.gca()
#
removals = {}
if isinstance(data_dict, dict):
data = list(data_dict.values())
else:
data = list(data_dict)
#
numvals = len(data)
data2 = data.copy()
if label in to_remove.keys():
removals = {(label, t) : data.count(t) for t in to_remove[label]}
data2 = np.array([i for i in data if i not in to_remove[label]])
if logscale:
data2 = np.log(data2)
# begin histogram calculations
data2 = [val for val in data2 if ~np.isnan(val)]
counts, edges = np.histogram(data, bins)
vals = [np.mean(edges[i:i+2]) for i in range(len(edges) - 1)]
#print(label, len(vals), len(counts))
s1 = {key : val for key, val in zip(vals, counts) if val != 0}
if len(s1) != 0:
vals, counts = zip(*s1.items())
counts = counts/ np.sum(counts)
#print(label, vals, counts)
ax.stem(vals, counts, color = col, label = label.replace('_', ' ')+' (sample size={})'.format(numvals), s=7)
#ymax = min([1, hist_roundup(max(counts))])
ax.set_ylim(0, 0.4)
#print('\t\t\tsetting ylim 0.4')
# tick formatting
ax.set_yticks(list(np.arange(0, 1.1, 0.2)))
ax.set_yticklabels(list(np.arange(0, 1.1, 0.2)))
#ax.ticklabel_format(style='sci', axis='y', scilimits=(2,0))
#ax.yaxis.set_major_formatter(mtick.ScalarFormatter(useMathText=True))
# adding annotation
notes = '\n'.join([r'$P({})$ = {}'.format(int(k1[1]), format(v1/numvals, '.1f'))
for k1, v1 in removals.items()])
if annotate:
ax.text(0.5, 0.97, notes, ha = 'center', va = 'top', transform=ax.transAxes, fontsize = 14)
#if label == 'demand':
# ax.axvline(np.mean(data), c = 'k', alpha = 0.3)
def single_timeseries(ax, data_dict, label, to_scatter = False, **kwargs):
col = kwargs.pop('col', 'k')
s, alpha = kwargs.pop('s', 10), kwargs.pop('alpha', 1)
maxtime = kwargs.pop('maxtime', np.inf)
y_lims = kwargs.pop('y_lims', None)
#
data_dict = {time : val for time, val in data_dict.items() if time <= maxtime}
if y_lims:
ax.set_ylim(y_lims)
else:
ax.set_ylim(ymin=0)
# special code for inventory plots
"""
if label == 'inventory':
remove = {k : v for k, v in data_dict.items() if check_time(k, 1/3)}
refill = {k : v for k, v in data_dict.items() if check_time(k, 2/3)}
ax.step(remove.keys(), remove.values(), label = 'depleted', color = 'red', alpha = alpha, lw=1.5)
ax.step(refill.keys(), refill.values(), label = 'replenished', color = plt.cm.Blues(0.8), alpha = alpha, lw=2.5)
#ax.scatter(remove.keys(), remove.values(), label = 'inv after supply', color = 'red', s = s, alpha = alpha, lw = 0.2, edgecolors = 'k')
#ax.scatter(refill.keys(), refill.values(), label = 'inv after production', color = plt.cm.Blues(0.8), s = s, alpha = alpha, lw = 0.2, edgecolors = 'k')
if max([max(remove.values()), max(refill.values())]) > 10000:
ax.set_yticks([10000,20000, 30000, 40000, 50000, 60000, 70000])
ax.set_yticklabels(['10K','20K', '30K', '40K', '50K', '60K', '70K'])
lgnd = ax.legend(fontsize='xx-large', loc='upper left')
for i in range(len(lgnd.legendHandles)):
lgnd.legendHandles[i]._sizes = [45]
return 0
"""
if isinstance(data_dict, dict):
times = np.array(list(data_dict.keys()))
vals = np.array(list(data_dict.values()))
if times.size == 0:
print('not plotting key:', label.replace('_', ' '))
return 0
#print(len(times), len(vals), label, times[0], vals[0])
if to_scatter:
ax.scatter(times, vals, label = label.replace('_', ' '), color = col, s = s, alpha = alpha, lw = 0.2, edgecolors = 'k')
else:
ax.step(times, vals, label = label.replace('_', ' '), color = col, alpha = alpha, lw=1.8)
#ax.plot(times, vals, label = label.replace('_', ' '), color = col, alpha = alpha, lw=3)
#ax.scatter(times, vals, color = 'k', s = 5, alpha = 0.7)
#if label == 'demand':
# ax.axhline(np.mean(vals), c = 'k', alpha = 0.3)
#################### helper functions ##########################
def get_largest_key_val(data_dict):
k = max(data_dict.keys())
return k, data_dict[k]
def is_number(s):
try:
float(s)
return True
except:
return False
#####################################################################
class DemandGenerator:
def __init__(self, d, desc, minmax=(-np.inf, np.inf)):
assert callable(d), 'provide valid demand function'
self.d_gen = d
self.desc = desc
self.minmax = minmax
self.foldername = '_'.join(desc.replace('$', "").replace("_", "").replace('\\', "").split(' '))
self.demand_type = desc.split()[0] if desc.split()[0] in ['normal', 'powerlaw', 'uniform'] else desc
#
def __call__(self):
d = self.d_gen()
m1, m2 = self.minmax
i = 0
while d < m1 or d > m2:
#print(d)
d = self.d_gen()
i += 1
if i % 5000 == 0:
print('trying to simulate demand within range - try #', str(i))
return d
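# Usage sketch: __call__ is rejection sampling -- it redraws until the sample
# lands inside minmax (inclusive), e.g.
#   gen = DemandGenerator(lambda: 100 + 15 * np.random.randn(),
#                         r'$N(\mu=100,\sigma=15)$', (0, 200))
#   d = gen()  # 0 <= d <= 200, possibly after several redraws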
def get_normal_DemandGenerator(mu, sigma, minmax=(0, np.inf)):
""" returns DemandGenerator with N(mu, sigma)
"""
assert all([arg is not None for arg in (mu, sigma)])
d = lambda : mu + sigma * np.random.randn()
d_gen = DemandGenerator(d, r'$N(\mu={},\sigma={})$'.format(mu, sigma), minmax)
d_gen.mu, d_gen.sigma = mu, sigma
return d_gen
def get_powerlaw_DemandGenerator(alpha, mu, minmax=(-np.inf, np.inf)):
""" returns DemandGenerator as (1-k)^(1/(1-alpha))
"""
assert all([arg is not None for arg in (alpha, mu)])
from scipy.stats import beta
b = alpha * (mu - 1)
d = lambda : np.power((beta.rvs(alpha, b)), -1)
d_gen = DemandGenerator(d, r'powerlaw ($\alpha={},\mu={})$'.format(format(alpha, '.2f'), int(mu)), minmax)
d_gen.alpha = alpha
return d_gen
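# The construction draws X ~ Beta(alpha, alpha*(mu - 1)) and returns 1/X, so
# every sample exceeds 1 and E[X] = 1/mu makes mu the nominal scale of the
# inverted draw. minmax defaults to (-inf, inf) here, so pass explicit bounds
# if the heavy tail must be truncated.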
def get_uniform_DemandGenerator(a, b):
""" returns DemandGenerator as a + k*(b - a)
"""
assert all([arg is not None for arg in (a, b)])
d = lambda : a + np.random.random() * (b - a)
d_gen = DemandGenerator(d, r'uniform in $[{},{})$'.format(a, b), minmax=(a, b))
d_gen.a, d_gen.b = a, b
return d_gen
##################### useful
def check_time(t, r):
    t = float("{0:.2f}".format(t))
r = float("{0:.2f}".format(r))
res = float("{0:.10f}".format((t%1)-r))
    return res == 0.0
|
// lib.rs
// ...
    pub result: String,
}
impl fmt::Display for LayerResult {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{} | {}", self.layer, self.result)
}
}
/// A Transition is the LayerResult of running the command on the lower layer
/// and of running the command on the higher layer. No-op transitions are not recorded.
#[derive(Debug, Eq, Ord, PartialOrd, PartialEq)]
pub struct Transition {
pub before: Option<LayerResult>,
pub after: LayerResult,
}
impl fmt::Display for Transition {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match &self.before {
Some(be) => write!(f, "({} -> {})", be, self.after),
None => write!(f, "-> {}", self.after),
}
}
}
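// Rendered examples (schematic, using the LayerResult format above): a
// transition with a `before` prints as "(<layer> | ok -> <layer> | error)",
// while the no-change case (before == None) prints as "-> <layer> | ok".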
/// Starts the bisect operation. Calculates highest and lowest layer result and if they have
/// different outputs it starts a binary chop to figure out which layer(s) caused the change.
fn get_changes<T>(layers: Vec<Layer>, action: &T) -> Result<Vec<Transition>, Error>
where
T: ContainerAction + 'static,
{
let first_layer = layers.first().expect("no first layer");
let last_layer = layers.last().expect("no last layer");
let first_image_name: String = first_layer.image_name.clone();
let last_image_name = &last_layer.image_name;
let action_c = action.clone();
let left_handle = thread::spawn(move || action_c.try_container(&first_image_name));
let end = action.try_container(last_image_name);
let start = left_handle.join().expect("first layer execution error!");
if start == end {
return Ok(vec![Transition {
before: None,
after: LayerResult {
layer: last_layer.clone(),
result: start,
},
}]);
}
bisect(
Vec::from(&layers[1..layers.len() - 1]),
LayerResult {
layer: first_layer.clone(),
result: start,
},
LayerResult {
layer: last_layer.clone(),
result: end,
},
action,
)
}
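// Note: the first and last layers are probed concurrently -- the first on a
// spawned thread, the last on the caller's thread -- and if both produce the
// same output the whole history is reported as a single no-op Transition
// without running any intermediate layer.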
fn bisect<T>(
history: Vec<Layer>,
start: LayerResult,
end: LayerResult,
action: &T,
) -> Result<Vec<Transition>, Error>
where
T: ContainerAction + 'static,
{
let size = history.len();
if size == 0 {
if start.result == end.result {
return Err(Error::new(std::io::ErrorKind::Other, ""));
}
return Ok(vec![Transition {
before: Some(start.clone()),
after: end.clone(),
}]);
}
let half = size / 2;
let mid_result = LayerResult {
layer: history[half].clone(),
result: action.try_container(&history[half].image_name),
};
if size == 1 {
let mut results = Vec::<Transition>::new();
if *start.result != mid_result.result {
results.push(Transition {
before: Some(start.clone()),
after: mid_result.clone(),
});
}
if mid_result.result != *end.result {
results.push(Transition {
before: Some(mid_result),
after: end.clone(),
});
}
return Ok(results);
}
if start.result == mid_result.result {
action.skip((mid_result.layer.height - start.layer.height) as u64);
return bisect(Vec::from(&history[half + 1..]), mid_result, end, action);
}
if mid_result.result == end.result {
action.skip((end.layer.height - mid_result.layer.height) as u64);
return bisect(Vec::from(&history[..half]), start, mid_result, action);
}
let clone_a = action.clone();
let clone_b = action.clone();
let mid_result_c = mid_result.clone();
let hist_a = Vec::from(&history[..half]);
let left_handle = thread::spawn(move || bisect(hist_a, start, mid_result, &clone_a));
let right_handle =
thread::spawn(move || bisect(Vec::from(&history[half + 1..]), mid_result_c, end, &clone_b));
let mut left_results: Vec<Transition> = left_handle
.join()
.expect("left")
.expect("left transition err");
let right_results: Vec<Transition> = right_handle
.join()
.expect("right")
.expect("right transition err");
left_results.extend(right_results); // These results are sorted later...
Ok(left_results)
}
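// Shape of the recursion: with a single behaviour change, the half that
// matches an endpoint is skipped (advancing the progress bar via `skip`), so
// roughly log2(n) containers run; when both halves differ from the midpoint,
// the two sub-searches run on parallel threads, so the worst case still
// executes O(n) containers but spread across threads.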
trait ContainerAction: Clone + Send {
fn try_container(&self, container_id: &str) -> String;
fn skip(&self, count: u64) -> ();
}
#[derive(Clone)]
struct DockerContainer {
pb: Arc<ProgressBar>,
command_line: Vec<String>,
timeout_in_seconds: usize,
}
impl DockerContainer {
fn new(total: u64, command_line: Vec<String>, timeout_in_seconds: usize) -> DockerContainer {
let pb = Arc::new(ProgressBar::new(total));
DockerContainer {
pb,
command_line,
timeout_in_seconds,
}
}
}
struct Guard<'a> { buf: &'a mut Vec<u8>, len: usize }
impl<'a> Drop for Guard<'a> {
fn drop(&mut self) {
unsafe { self.buf.set_len(self.len); }
}
}
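// This guard mirrors the standard library's read_to_end trick: `len` records
// how many bytes were actually filled, and the Drop impl truncates the buffer
// back to that length, so capacity reserved before `read` is never exposed as
// initialized data even on an early break.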
impl ContainerAction for DockerContainer {
fn try_container(&self, container_id: &str) -> String {
let docker: Docker = Docker::connect_with_defaults().expect("docker daemon running?");
let container_name: String = rand::thread_rng().gen_range(0., 1.3e4).to_string();
//Create container
let mut create = ContainerCreateOptions::new(&container_id);
let mut host_config = ContainerHostConfig::new();
host_config.auto_remove(false);
create.host_config(host_config);
let it = self.command_line.iter();
for command in it {
create.cmd(command.clone());
}
let container: CreateContainerResponse = docker
.create_container(Some(&container_name), &create)
.expect("couldn't create container");
let result = docker.start_container(&container.id);
if result.is_err() {
let err: dockworker::errors::Error = result.unwrap_err();
return format!("{}", err);
}
let log_options = ContainerLogOptions {
stdout: true,
stderr: true,
since: None,
timestamps: None,
tail: None,
follow: true,
};
let timeout = Duration::from_secs(self.timeout_in_seconds as u64);
let mut container_output = String::new();
let now = SystemTime::now();
let timeout_time = now + timeout;
let result = docker.log_container(&container_name, &log_options);
if let Ok(result) = result {
let mut r = result;
let reservation_size = 32;
let mut buf = Vec::<u8>::new();
{
let mut g = Guard { len: buf.len(), buf: &mut buf };
loop {
if g.len == g.buf.len() {
g.buf.resize(g.len + reservation_size, 0);
}
match r.read(&mut g.buf[g.len..]) {
Ok(0) => { break; }
Ok(n) => g.len += n,
Err(ref e) if e.kind() == ErrorKind::Interrupted => {}
Err(_e) => { break; }
}
if SystemTime::now() > timeout_time {
break;
}
}
}
container_output = String::from_utf8_lossy(&buf).to_string();
}
self.pb.inc(1);
let _stop_result = docker.stop_container(&container.id, timeout);
container_output
}
fn skip(&self, count: u64) -> () {
self.pb.inc(count);
}
}
/// Struct to hold parameters.
pub struct BisectOptions {
pub timeout_in_seconds: usize,
pub trunc_size: usize,
}
/// Create containers based on layers and run command_line against them.
/// Result is the differences in std out and std err.
pub fn try_bisect(
histories: &Vec<ImageLayer>,
command_line: Vec<String>,
options: BisectOptions,
) -> Result<Vec<Transition>, Error> {
println!(
"\n{}\n\n{:?}\n",
"Command to apply to layers:".bold(),
&command_line
);
let create_and_try_container = DockerContainer::new(
histories.len() as u64,
command_line,
options.timeout_in_seconds,
);
println!("{}", "Skipped missing layers:".bold());
println!();
let mut layers = Vec::new();
for (index, event) in histories.iter().rev().enumerate() {
let mut created = event.created_by.clone();
created = truncate(&created, options.trunc_size).to_string();
match event.id.clone() {
Some(layer_name) => layers.push(Layer {
height: index,
image_name: layer_name,
creation_command: event.created_by.clone(),
}),
None => println!("{:<3}: {}.", index, truncate(&created, options.trunc_size)),
}
}
println!();
println!(
"{}",
"Bisecting found layers (running command on the layers) ==>\n | get_changes | identifier_name |