code: string
signature: string
docstring: string
loss_without_docstring: float64
loss_with_docstring: float64
factor: float64
obj = javabridge.call(self.jobject, "getOwner", "()Lweka/core/CapabilitiesHandler;") if obj is None: return None else: return JavaObject(jobject=obj)
def owner(self)
Returns the owner of these capabilities, if any. :return: the owner, can be None :rtype: JavaObject
5.460055
4.713127
1.158478
result = [] iterator = javabridge.iterate_java(javabridge.call(self.jobject, "dependencies", "()Ljava/util/Iterator;")) for c in iterator: result.append(Capability(c)) return result
def dependencies(self)
Returns all the dependencies. :return: the dependency list :rtype: list
7.255978
7.178643
1.010773
if multi is None: return Capabilities(javabridge.static_call( "weka/core/Capabilities", "forInstances", "(Lweka/core/Instances;)Lweka/core/Capabilities;", data.jobject)) else: return Capabilities(javabridge.static_call( "weka/core/Capabilities", "forInstances", "(Lweka/core/Instances;Z)Lweka/core/Capabilities;", data.jobject, multi))
def for_instances(cls, data, multi=None)
Returns a Capabilities object specific for this data. The minimum number of instances is not set, the check for multi-instance data is optional. :param data: the data to generate the capabilities for :type data: Instances :param multi: whether to check the structure, too :type multi: bool :return: the generated capabilities :rtype: Capabilities
2.331031
2.030382
1.148075
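A minimal usage sketch for for_instances above; the import paths (weka.core.jvm, weka.core.converters, weka.core.capabilities) and the ARFF file name are assumptions, not taken from this record.

    import weka.core.jvm as jvm
    from weka.core.converters import Loader
    from weka.core.capabilities import Capabilities  # module path assumed

    jvm.start()
    loader = Loader(classname="weka.core.converters.ArffLoader")
    data = loader.load_file("iris.arff")  # hypothetical dataset
    data.class_is_last()
    caps = Capabilities.for_instances(data)  # capabilities required by this data
    print(caps)
    jvm.stop()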
if not plot.matplotlib_available: logger.error("Matplotlib is not installed, plotting unavailable!") return # create subsample data = plot.create_subsample(data, percent=percent, seed=seed) # collect data x = [] y = [] if data.class_index == -1: c = None else: c = [] for i in range(data.num_instances): inst = data.get_instance(i) x.append(inst.get_value(index_x)) y.append(inst.get_value(index_y)) if c is not None: c.append(inst.get_value(inst.class_index)) # plot data fig, ax = plt.subplots() if c is None: ax.scatter(x, y, s=size, alpha=0.5) else: ax.scatter(x, y, c=c, s=size, alpha=0.5) ax.set_xlabel(data.attribute(index_x).name) ax.set_ylabel(data.attribute(index_y).name) if title is None: title = "Attribute scatter plot" if percent != 100: title += " (%0.1f%%)" % percent ax.set_title(title) ax.plot(ax.get_xlim(), ax.get_ylim(), ls="--", c="0.3") ax.grid(True) fig.canvas.set_window_title(data.relationname) plt.draw() if outfile is not None: plt.savefig(outfile) if wait: plt.show()
def scatter_plot(data, index_x, index_y, percent=100.0, seed=1, size=50, title=None, outfile=None, wait=True)
Plots two attributes against each other. TODO: click events http://matplotlib.org/examples/event_handling/data_browser.html :param data: the dataset :type data: Instances :param index_x: the 0-based index of the attribute on the x axis :type index_x: int :param index_y: the 0-based index of the attribute on the y axis :type index_y: int :param percent: the percentage of the dataset to use for plotting :type percent: float :param seed: the seed value to use for subsampling :type seed: int :param size: the size of the circles in points :type size: int :param title: an optional title :type title: str :param outfile: the (optional) file to save the generated plot to. The extension determines the file format. :type outfile: str :param wait: whether to wait for the user to close the plot :type wait: bool
2.202852
2.088433
1.054787
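A usage sketch for scatter_plot above, assuming it lives in weka.plot.dataset (an assumption) and that matplotlib is installed; the file name and attribute indices are illustrative only.

    import weka.core.jvm as jvm
    from weka.core.converters import Loader
    import weka.plot.dataset as pld  # module path assumed

    jvm.start()
    data = Loader(classname="weka.core.converters.ArffLoader").load_file("iris.arff")  # hypothetical dataset
    data.class_is_last()
    # plot attribute 0 against attribute 2 on a 50% subsample
    pld.scatter_plot(data, 0, 2, percent=50.0, wait=True)
    jvm.stop()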
if not plot.matplotlib_available: logger.error("Matplotlib is not installed, plotting unavailable!") return # create subsample data = plot.create_subsample(data, percent=percent, seed=seed) fig = plt.figure() if atts is None: x = [] for i in range(data.num_attributes): x.append(i) else: x = atts ax = fig.add_subplot(111) ax.set_xlabel("attributes") ax.set_ylabel("value") ax.grid(True) for index_y in range(data.num_instances): y = [] for index_x in x: y.append(data.get_instance(index_y).get_value(index_x)) ax.plot(x, y, "o-", alpha=0.5) if title is None: title = data.relationname if percent != 100: title += " (%0.1f%%)" % percent fig.canvas.set_window_title(title) plt.draw() if outfile is not None: plt.savefig(outfile) if wait: plt.show()
def line_plot(data, atts=None, percent=100.0, seed=1, title=None, outfile=None, wait=True)
Uses the internal format to plot the dataset, one line per instance. :param data: the dataset :type data: Instances :param atts: the list of 0-based attribute indices of attributes to plot :type atts: list :param percent: the percentage of the dataset to use for plotting :type percent: float :param seed: the seed value to use for subsampling :type seed: int :param title: an optional title :type title: str :param outfile: the (optional) file to save the generated plot to. The extension determines the file format. :type outfile: str :param wait: whether to wait for the user to close the plot :type wait: bool
2.485669
2.407488
1.032474
if isinstance(data, list): result = [] for d in data: result.append(Instances(javabridge.static_call( "Lweka/filters/Filter;", "useFilter", "(Lweka/core/Instances;Lweka/filters/Filter;)Lweka/core/Instances;", d.jobject, self.jobject))) return result else: return Instances(javabridge.static_call( "Lweka/filters/Filter;", "useFilter", "(Lweka/core/Instances;Lweka/filters/Filter;)Lweka/core/Instances;", data.jobject, self.jobject))
def filter(self, data)
Filters the dataset(s). When providing a list, this can be used to create compatible train/test sets, since the filter only gets initialized with the first dataset and all subsequent datasets get transformed using the same setup. NB: inputformat(Instances) must have been called beforehand. :param data: the Instances to filter :type data: Instances or list of Instances :return: the filtered Instances object(s) :rtype: Instances or list of Instances
2.2752
2.091553
1.087804
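A sketch of the list behaviour described in the docstring above (compatible train/test sets from one filter setup); the filter classname, its options and the train/test variables are illustrative assumptions.

    from weka.filters import Filter

    # 'train' and 'test' are hypothetical Instances objects loaded elsewhere
    remove = Filter(classname="weka.filters.unsupervised.attribute.Remove", options=["-R", "1"])
    remove.inputformat(train)                                     # initialize the filter with the first dataset
    filtered_train, filtered_test = remove.filter([train, test])  # same setup applied to both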
objects = javabridge.get_env().get_object_array_elements( javabridge.call(self.jobject, "getFilters", "()[Lweka/filters/Filter;")) result = [] for obj in objects: result.append(Filter(jobject=obj)) return result
def filters(self)
Returns the list of base filters. :return: the filter list :rtype: list
3.480597
3.4668
1.00398
obj = [] for fltr in filters: obj.append(fltr.jobject) javabridge.call(self.jobject, "setFilters", "([Lweka/filters/Filter;)V", obj)
def filters(self, filters)
Sets the base filters. :param filters: the list of base filters to use :type filters: list
4.28738
5.231853
0.819476
result = [] result.append(self.__class__.__name__) result.append(re.sub(r'.', '=', self.__class__.__name__)) result.append("") result.append("Supported value names:") for a in self.allowed: result.append(a) return '\n'.join(result)
def generate_help(self)
Generates a help string for this container. :return: the help string :rtype: str
4.355897
4.47113
0.974227
result = super(Transformer, self).post_execute() if result is None: self._input = None return result
def post_execute(self)
Gets executed after the actual execution. :return: None if successful, otherwise error message :rtype: str
6.940053
7.582624
0.915258
return "incremental: " + str(self.config["incremental"]) \ + ", custom: " + str(self.config["use_custom_loader"]) \ + ", loader: " + base.to_commandline(self.config["custom_loader"])
def quickinfo(self)
Returns a short string describing some of the options of the actor. :return: the info, None if not available :rtype: str
7.154207
8.083498
0.885038
opt = "incremental" if opt not in options: options[opt] = False if opt not in self.help: self.help[opt] = "Whether to load the dataset incrementally (bool)." opt = "use_custom_loader" if opt not in options: options[opt] = False if opt not in self.help: self.help[opt] = "Whether to use a custom loader." opt = "custom_loader" if opt not in options: options[opt] = converters.Loader(classname="weka.core.converters.ArffLoader") if opt not in self.help: self.help[opt] = "The custom loader to use (Loader)." return super(LoadDataset, self).fix_config(options)
def fix_config(self, options)
Fixes the options, if necessary. I.e., it adds all required elements to the dictionary. :param options: the options to fix :type options: dict :return: the (potentially) fixed options :rtype: dict
3.022713
3.063711
0.986618
if token is None: raise Exception(self.full_name + ": No token provided!") if isinstance(token.payload, str): return raise Exception(self.full_name + ": Unhandled class: " + classes.get_classname(token.payload))
def check_input(self, token)
Performs checks on the input token. Raises an exception if unsupported. :param token: the token to check :type token: Token
6.197305
6.552011
0.945863
fname = str(self.input.payload) if not os.path.exists(fname): return "File '" + fname + "' does not exist!" if not os.path.isfile(fname): return "Location '" + fname + "' is not a file!" if self.resolve_option("use_custom_loader"): self._loader = self.resolve_option("custom_loader") else: self._loader = converters.loader_for_file(fname) dataset = self._loader.load_file(fname, incremental=bool(self.resolve_option("incremental"))) if not self.resolve_option("incremental"): self._output.append(Token(dataset)) else: self._iterator = self._loader.__iter__() return None
def do_execute(self)
The actual execution of the actor. :return: None if successful, otherwise error message :rtype: str
4.395151
4.263729
1.030823
if self._iterator is not None: try: inst = self._iterator.next() result = Token(inst) except Exception as e: self._iterator = None result = None else: result = super(LoadDataset, self).output() return result
def output(self)
Returns the next available output token. :return: the next token, None if none available :rtype: Token
5.406641
4.521791
1.195686
super(LoadDataset, self).stop_execution() self._loader = None self._iterator = None
def stop_execution(self)
Triggers the stopping of the object.
10.272593
9.539651
1.076831
self._loader = None self._iterator = None super(LoadDataset, self).wrapup()
def wrapup(self)
Finishes up after execution finishes, does not remove any graphical output.
12.014915
12.273273
0.97895
options = super(SetStorageValue, self).fix_config(options) opt = "storage_name" if opt not in options: options[opt] = "unknown" if opt not in self.help: self.help[opt] = "The storage value name for storing the payload under (string)." return options
def fix_config(self, options)
Fixes the options, if necessary. I.e., it adds all required elements to the dictionary. :param options: the options to fix :type options: dict :return: the (potentially) fixed options :rtype: dict
6.757708
7.857314
0.860053
if self.storagehandler is None: return "No storage handler available!" self.storagehandler.storage[self.resolve_option("storage_name")] = self.input.payload self._output.append(self.input) return None
def do_execute(self)
The actual execution of the actor. :return: None if successful, otherwise error message :rtype: str
11.556395
10.404181
1.110745
options = super(DeleteStorageValue, self).fix_config(options) opt = "storage_name" if opt not in options: options[opt] = "unknown" if opt not in self.help: self.help[opt] = "The name of the storage value to delete (string)." return options
def fix_config(self, options)
Fixes the options, if necessary. I.e., it adds all required elements to the dictionary. :param options: the options to fix :type options: dict :return: the (potentially) fixed options :rtype: dict
5.002699
5.778937
0.865678
options = super(InitStorageValue, self).fix_config(options) opt = "storage_name" if opt not in options: options[opt] = "unknown" if opt not in self.help: self.help[opt] = "The name of the storage value to initialize (string)." opt = "value" if opt not in options: options[opt] = "1" if opt not in self.help: self.help[opt] = "The initial value (string)." return options
def fix_config(self, options)
Fixes the options, if necessary. I.e., it adds all required elements to the dictionary. :param options: the options to fix :type options: dict :return: the (potentially) fixed options :rtype: dict
3.445902
3.801734
0.906403
if self.storagehandler is None: return "No storage handler available!" self.storagehandler.storage[self.resolve_option("storage_name")] = eval(str(self.resolve_option("value"))) self._output.append(self.input) return None
def do_execute(self)
The actual execution of the actor. :return: None if successful, otherwise error message :rtype: str
9.59347
9.0408
1.061131
options = super(UpdateStorageValue, self).fix_config(options) opt = "storage_name" if opt not in options: options[opt] = "unknown" if opt not in self.help: self.help[opt] = "The name of the storage value to update (string)." opt = "expression" if opt not in options: options[opt] = "int({X} + 1)" if opt not in self.help: self.help[opt] = "The expression for updating the storage value; use {X} for current value (string)." return options
def fix_config(self, options)
Fixes the options, if necessary. I.e., it adds all required elements to the dictionary. :param options: the options to fix :type options: dict :return: the (potentially) fixed options :rtype: dict
3.789345
4.069009
0.93127
if self.storagehandler is None: return "No storage handler available!" expr = str(self.resolve_option("expression")).replace( "{X}", str(self.storagehandler.storage[str(self.resolve_option("storage_name"))])) expr = self.storagehandler.expand(expr) self.storagehandler.storage[self.resolve_option("storage_name")] = eval(expr) self._output.append(self.input) return None
def do_execute(self)
The actual execution of the actor. :return: None if successful, otherwise error message :rtype: str
6.15875
5.727062
1.075377
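A plain-Python sketch of the {X} substitution plus eval mechanism that do_execute above relies on, outside of any flow:

    # mimic UpdateStorageValue: substitute the current storage value for {X}, then evaluate
    current = 3
    expression = "int({X} + 1)"
    updated = eval(expression.replace("{X}", str(current)))
    print(updated)  # 4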
options = super(MathExpression, self).fix_config(options) opt = "expression" if opt not in options: options[opt] = "{X}" if opt not in self.help: self.help[opt] = "The mathematical expression to evaluate (string)." return options
def fix_config(self, options)
Fixes the options, if necessary. I.e., it adds all required elements to the dictionary. :param options: the options to fix :type options: dict :return: the (potentially) fixed options :rtype: dict
5.170448
5.639712
0.916793
expr = str(self.resolve_option("expression")) expr = expr.replace("{X}", str(self.input.payload)) self._output.append(Token(eval(expr))) return None
def do_execute(self)
The actual execution of the actor. :return: None if successful, otherwise error message :rtype: str
15.926697
16.06299
0.991515
options = super(ClassSelector, self).fix_config(options) opt = "index" if opt not in options: options[opt] = "last" if opt not in self.help: self.help[opt] = "The class index (1-based number); 'first' and 'last' are accepted as well (string)." opt = "unset" if opt not in options: options[opt] = False if opt not in self.help: self.help[opt] = "Whether to unset the class index (bool)." return options
def fix_config(self, options)
Fixes the options, if necessary. I.e., it adds all required elements to the dictionary. :param options: the options to fix :type options: dict :return: the (potentially) fixed options :rtype: dict
3.696467
3.982488
0.92818
if isinstance(self.input.payload, Instances): inst = None data = self.input.payload elif isinstance(self.input.payload, Instance): inst = self.input.payload data = inst.dataset index = str(self.resolve_option("index")) unset = bool(self.resolve_option("unset")) if unset: data.no_class() else: if index == "first": data.class_is_first() elif index == "last": data.class_is_last() else: data.class_index = int(index) - 1 if inst is None: self._output.append(Token(data)) else: self._output.append(Token(inst)) return None
def do_execute(self)
The actual execution of the actor. :return: None if successful, otherwise error message :rtype: str
4.290696
4.301231
0.997551
options = super(Train, self).fix_config(options) opt = "setup" if opt not in options: options[opt] = Classifier(classname="weka.classifiers.rules.ZeroR") if opt not in self.help: self.help[opt] = "The classifier/clusterer/associator to train (Classifier/Clusterer/Associator)." return options
def fix_config(self, options)
Fixes the options, if necessary. I.e., it adds all required elements to the dictionary. :param options: the options to fix :type options: dict :return: the (potentially) fixed options :rtype: dict
6.677081
7.107767
0.939406
if isinstance(self.input.payload, Instances): inst = None data = self.input.payload else: inst = self.input.payload data = inst.dataset retrain = False if (self._header is None) or (self._header.equal_headers(data) is not None) or (inst is None): retrain = True self._header = Instances.template_instances(data, 0) if retrain or (self._model is None): cls = self.resolve_option("setup") if isinstance(cls, Classifier): self._model = Classifier.make_copy(cls) elif isinstance(cls, Clusterer): self._model = Clusterer.make_copy(cls) elif isinstance(cls, Associator): self._model = Associator.make_copy(cls) else: return "Unhandled class: " + classes.get_classname(cls) if retrain: if inst is not None: data = Instances.template_instances(data, 1) data.add_instance(inst) if isinstance(self._model, Classifier): self._model.build_classifier(data) elif isinstance(self._model, Clusterer): self._model.build_clusterer(data) elif isinstance(self._model, Associator): self._model.build_associations(data) else: if isinstance(self._model, Classifier): self._model.update_classifier(inst) elif isinstance(self._model, Clusterer): self._model.update_clusterer(inst) else: return "Cannot train incrementally: " + classes.get_classname(self._model) cont = ModelContainer(model=self._model, header=self._header) self._output.append(Token(cont)) return None
def do_execute(self)
The actual execution of the actor. :return: None if successful, otherwise error message :rtype: str
3.082759
3.048023
1.011396
opt = "setup" if opt not in options: options[opt] = filters.Filter(classname="weka.filters.AllFilter") if opt not in self.help: self.help[opt] = "The filter to apply to the dataset (Filter)." opt = "keep_relationname" if opt not in options: options[opt] = False if opt not in self.help: self.help[opt] = "Whether to keep the original relation name (bool)." return super(Filter, self).fix_config(options)
def fix_config(self, options)
Fixes the options, if necessary. I.e., it adds all required elements to the dictionary. :param options: the options to fix :type options: dict :return: the (potentially) fixed options :rtype: dict
4.655805
4.734876
0.9833
if token is None: raise Exception(self.full_name + ": No token provided!") if isinstance(token.payload, Instances): return if isinstance(token.payload, Instance): return raise Exception(self.full_name + ": Unhandled class: " + classes.get_classname(token.payload))
def check_input(self, token)
Performs checks on the input token. Raises an exception if unsupported. :param token: the token to check :type token: Token
5.627841
6.007185
0.936852
if isinstance(self.input.payload, Instance): inst = self.input.payload data = Instances.template_instances(inst.dataset, 1) data.add_instance(inst) else: inst = None data = self.input.payload relname = data.relationname keep = self.resolve_option("keep_relationname") if inst is None: if (self._filter is None) or self._header.equal_headers(data) is not None: self._header = Instances.template_instances(data) self._filter = filters.Filter.make_copy(self.resolve_option("setup")) self._filter.inputformat(data) filtered = self._filter.filter(data) if keep: filtered.relationname = relname self._output.append(Token(filtered)) else: if (self._filter is None) or self._header.equal_headers(data) is not None: self._header = Instances.template_instances(data) self._filter = filters.Filter.make_copy(self.resolve_option("setup")) self._filter.inputformat(data) filtered = self._filter.filter(data) if keep: filtered.relationname = relname self._output.append(Token(filtered.get_instance(0))) else: self._filter.input(inst) self._filter.batch_finished() filtered = self._filter.output() if keep: filtered.dataset.relationname = relname self._output.append(Token(filtered)) return None
def do_execute(self)
The actual execution of the actor. :return: None if successful, otherwise error message :rtype: str
3.593068
3.602852
0.997284
options = super(DeleteFile, self).fix_config(options) opt = "regexp" if opt not in options: options[opt] = ".*" if opt not in self.help: self.help[opt] = "The regular expression that the files must match (string)." return options
def fix_config(self, options)
Fixes the options, if necessary. I.e., it adds all required elements to the dictionary. :param options: the options to fix :type options: dict :return: the (potentially) fixed options :rtype: dict
4.823689
5.463096
0.882959
fname = str(self.input.payload) spattern = str(self.resolve_option("regexp")) pattern = None if (spattern is not None) and (spattern != ".*"): pattern = re.compile(spattern) if (pattern is None) or (pattern.match(fname)): os.remove(fname) self._output.append(self.input) return None
def do_execute(self)
The actual execution of the actor. :return: None if successful, otherwise error message :rtype: str
5.852952
5.799117
1.009283
options = super(CrossValidate, self).fix_config(options) opt = "setup" if opt not in options: options[opt] = Classifier(classname="weka.classifiers.rules.ZeroR") if opt not in self.help: self.help[opt] = "The classifier/clusterer to train (Classifier/Clusterer)." opt = "folds" if opt not in options: options[opt] = 10 if opt not in self.help: self.help[opt] = "The number of folds for CV (int)." opt = "seed" if opt not in options: options[opt] = 1 if opt not in self.help: self.help[opt] = "The seed value for randomizing the data (int)." opt = "discard_predictions" if opt not in options: options[opt] = False if opt not in self.help: self.help[opt] = "Discard classifier predictions to save memory (bool)." opt = "output" if opt not in options: options[opt] = None if opt not in self.help: self.help[opt] = "For capturing the classifier's prediction output (PredictionOutput)." return options
def fix_config(self, options)
Fixes the options, if necessary. I.e., it adds all required elements to the dictionary. :param options: the options to fix :type options: dict :return: the (potentially) fixed options :rtype: dict
2.532337
2.605354
0.971974
data = self.input.payload cls = self.resolve_option("setup") if isinstance(cls, Classifier): cls = Classifier.make_copy(cls) evl = Evaluation(data) evl.discard_predictions = bool(self.resolve_option("discard_predictions")) evl.crossvalidate_model( cls, data, int(self.resolve_option("folds")), Random(int(self.resolve_option("seed"))), self.resolve_option("output")) self._output.append(Token(evl)) elif isinstance(cls, Clusterer): cls = Clusterer.make_copy(cls) evl = ClusterEvaluation() llh = evl.crossvalidate_model( cls, data, int(self.resolve_option("folds")), Random(int(self.resolve_option("seed")))) self._output.append(Token(llh)) else: return "Unhandled class: " + classes.get_classname(cls) return None
def do_execute(self)
The actual execution of the actor. :return: None if successful, otherwise error message :rtype: str
4.266096
4.182817
1.01991
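The same cross-validation done directly with the Evaluation API the actor above wraps; a minimal sketch assuming 'data' is a class-labelled Instances object and that Random is importable from weka.core.classes.

    from weka.classifiers import Classifier, Evaluation
    from weka.core.classes import Random  # import path assumed

    cls = Classifier(classname="weka.classifiers.rules.ZeroR")
    evl = Evaluation(data)
    evl.crossvalidate_model(cls, data, 10, Random(1))
    print(evl.summary())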
options = super(Evaluate, self).fix_config(options) opt = "storage_name" if opt not in options: options[opt] = "unknown" if opt not in self.help: self.help[opt] = "The name of the classifier model in storage (string)." opt = "discard_predictions" if opt not in options: options[opt] = False if opt not in self.help: self.help[opt] = "Discard classifier predictions to save memory (bool)." opt = "output" if opt not in options: options[opt] = None if opt not in self.help: self.help[opt] = "For capturing the classifier's prediction output (PredictionOutput)." return options
def fix_config(self, options)
Fixes the options, if necessary. I.e., it adds all required elements to the dictionary. :param options: the options to fix :type options: dict :return: the (potentially) fixed options :rtype: dict
3.471926
3.713419
0.934967
data = self.input.payload if self.storagehandler is None: return "No storage handler available!" sname = str(self.resolve_option("storage_name")) if sname not in self.storagehandler.storage: return "No storage item called '" + sname + "' present!" cls = self.storagehandler.storage[sname] if isinstance(cls, Classifier): evl = Evaluation(data) evl.discard_predictions = bool(self.resolve_option("discard_predictions")) evl.test_model( cls, data, self.resolve_option("output")) elif isinstance(cls, Clusterer): evl = ClusterEvaluation() evl.set_model(cls) evl.test_model(data) else: return "Unhandled class: " + classes.get_classname(cls) self._output.append(Token(evl)) return None
def do_execute(self)
The actual execution of the actor. :return: None if successful, otherwise error message :rtype: str
5.079478
4.945368
1.027118
options = super(EvaluationSummary, self).fix_config(options) opt = "title" if opt not in options: options[opt] = None if opt not in self.help: self.help[opt] = "The title for the output (string)." opt = "complexity" if opt not in options: options[opt] = False if opt not in self.help: self.help[opt] = "Whether to output classifier complexity information (bool)." opt = "matrix" if opt not in options: options[opt] = False if opt not in self.help: self.help[opt] = "Whether to output the classifier confusion matrix (bool)." return options
def fix_config(self, options)
Fixes the options, if necessary. I.e., it adds all required elements to the dictionary. :param options: the options to fix :type options: dict :return: the (potentially) fixed options :rtype: dict
2.823039
3.00814
0.938466
if isinstance(token.payload, Evaluation): return None if isinstance(token.payload, ClusterEvaluation): return None raise Exception( self.full_name + ": Input token is not a supported Evaluation object - " + classes.get_classname(token.payload))
def check_input(self, token)
Performs checks on the input token. Raises an exception if unsupported. :param token: the token to check :type token: Token
10.644255
11.162065
0.95361
evl = self.input.payload if isinstance(evl, Evaluation): summary = evl.summary(title=self.resolve_option("title"), complexity=bool(self.resolve_option("complexity"))) if bool(self.resolve_option("matrix")): summary += "\n" + evl.matrix(title=self.resolve_option("title")) else: summary = evl.cluster_results self._output.append(Token(summary)) return None
def do_execute(self)
The actual execution of the actor. :return: None if successful, otherwise error message :rtype: str
7.207852
7.194637
1.001837
fname = self.input.payload data = serialization.read_all(fname) if len(data) == 1: if is_instance_of(data[0], "weka.classifiers.Classifier"): cont = ModelContainer(model=Classifier(jobject=data[0])) elif is_instance_of(data[0], "weka.clusterers.Clusterer"): cont = ModelContainer(model=Clusterer(jobject=data[0])) else: return "Unhandled class: " + classes.get_classname(data[0]) elif len(data) == 2: if is_instance_of(data[0], "weka.classifiers.Classifier"): cont = ModelContainer(model=Classifier(jobject=data[0]), header=Instances(data[1])) elif is_instance_of(data[0], "weka.clusterers.Clusterer"): cont = ModelContainer(model=Clusterer(jobject=data[0]), header=Instances(data[1])) else: return "Unhandled class: " + classes.get_classname(data[0]) else: return "Expected 1 or 2 objects, but got " + str(len(data)) + " instead reading: " + fname self._output.append(Token(cont)) return None
def do_execute(self)
The actual execution of the actor. :return: None if successful, otherwise error message :rtype: str
2.674342
2.646237
1.010621
opt = "setup" if opt not in options: options[opt] = conversion.PassThrough() if opt not in self.help: self.help[opt] = "The conversion to apply to the input data (Conversion)." return super(Convert, self).fix_config(options)
def fix_config(self, options)
Fixes the options, if necessary. I.e., it adds all required elements to the dictionary. :param options: the options to fix :type options: dict :return: the (potentially) fixed options :rtype: dict
8.438257
8.571314
0.984477
if token is None: raise Exception(self.full_name + ": No token provided!") self.config["setup"].check_input(token.payload)
def check_input(self, token)
Performs checks on the input token. Raises an exception if unsupported. :param token: the token to check :type token: Token
11.124425
12.729884
0.873883
conv = self.config["setup"].shallow_copy() conv.input = self._input.payload result = conv.convert() if result is None: if conv.output is not None: self._output.append(Token(conv.output)) return None
def do_execute(self)
The actual execution of the actor. :return: None if successful, otherwise error message :rtype: str
10.812778
10.058526
1.074986
return "search: " + base.to_commandline(self.config["search"]) + ", eval: " \ + base.to_commandline(self.config["eval"])
def quickinfo(self)
Returns a short string describing some of the options of the actor. :return: the info, None if not available :rtype: str
10.098751
11.411718
0.884946
opt = "search" if opt not in options: options[opt] = attsel.ASSearch(classname="weka.attributeSelection.BestFirst") if opt not in self.help: self.help[opt] = "The search algorithm to use (ASSearch)." opt = "eval" if opt not in options: options[opt] = attsel.ASEvaluation(classname="weka.attributeSelection.CfsSubsetEval") if opt not in self.help: self.help[opt] = "The evaluation algorithm to use (ASEvaluation)." return super(AttributeSelection, self).fix_config(options)
def fix_config(self, options)
Fixes the options, if necessary. I.e., it adds all required elements to the dictionary. :param options: the options to fix :type options: dict :return: the (potentially) fixed options :rtype: dict
3.67751
3.770751
0.975272
if token is None: raise Exception(self.full_name + ": No token provided!") if not isinstance(token.payload, Instances): raise Exception(self.full_name + ": Not an Instances object!")
def check_input(self, token)
Performs checks on the input token. Raises an exception if unsupported. :param token: the token to check :type token: Token
5.76839
6.497803
0.887745
data = self.input.payload search = self.config["search"].shallow_copy() evl = self.config["eval"].shallow_copy() asel = attsel.AttributeSelection() asel.search(search) asel.evaluator(evl) asel.select_attributes(data) cont = AttributeSelectionContainer( original=data, reduced=asel.reduce_dimensionality(data), num_atts=asel.number_attributes_selected, selected=asel.selected_attributes, results=asel.results_string) self._output.append(Token(cont)) return None
def do_execute(self)
The actual execution of the actor. :return: None if successful, otherwise error message :rtype: str
7.303955
7.126659
1.024878
options = super(RenameRelation, self).fix_config(options) opt = "name" if opt not in options: options[opt] = "newname" if opt not in self.help: self.help[opt] = "The new relation name to use (string)." return options
def fix_config(self, options)
Fixes the options, if necessary. I.e., it adds all required elements to the dictionary. :param options: the options to fix :type options: dict :return: the (potentially) fixed options :rtype: dict
4.793558
5.389555
0.889416
relname = self.resolve_option("name") if isinstance(self.input.payload, Instance): self.input.payload.dataset.relationname = relname else: self.input.payload.relationname = relname self._output.append(self.input) return None
def do_execute(self)
The actual execution of the actor. :return: None if successful, otherwise error message :rtype: str
7.582281
7.654747
0.990533
if isinstance(self.input.payload, classes.JavaObject) and self.input.payload.is_serializable: copy = serialization.deepcopy(self.input.payload) if copy is not None: self._output.append(Token(copy)) else: self._output.append(self.input) return None
def do_execute(self)
The actual execution of the actor. :return: None if successful, otherwise error message :rtype: str
6.219161
6.217138
1.000325
options = super(Predict, self).fix_config(options) opt = "model" if opt not in options: options[opt] = "." if opt not in self.help: self.help[opt] = "The serialized model to use for making predictions (string)." opt = "storage_name" if opt not in options: options[opt] = "unknown" if opt not in self.help: self.help[opt] = "The name of the model (or ModelContainer) in storage to use (string)." return options
def fix_config(self, options)
Fixes the options, if necessary. I.e., it adds all required elements to the dictionary. :param options: the options to fix :type options: dict :return: the (potentially) fixed options :rtype: dict
4.14488
4.413836
0.939065
if isinstance(token.payload, Instance): return raise Exception(self.full_name + ": Unhandled data type: " + str(token.payload.__class__.__name__))
def check_input(self, token)
Performs checks on the input token. Raises an exception if unsupported. :param token: the token to check :type token: Token
9.221407
10.103695
0.912677
inst = self.input.payload if not inst.has_class: return "No class set!" # load model? if self._model is None: model = None fname = str(self.resolve_option("model")) if os.path.isfile(fname): model = serialization.read(fname) else: name = self.resolve_option("storage_name") if name in self.storagehandler.storage: model = self.storagehandler.storage.get(name) if isinstance(model, ModelContainer): model = model.get("Model").jobject if model is None: return "No model available from storage or serialized file!" self._is_classifier = is_instance_of(model, "weka.classifiers.Classifier") if self._is_classifier: self._model = Classifier(jobject=model) else: self._model = Clusterer(jobject=model) if self._is_classifier: cls = self._model.classify_instance(inst) dist = self._model.distribution_for_instance(inst) label = inst.class_attribute.value(int(cls)) cont = ClassificationContainer(inst=inst, classification=cls, distribution=dist, label=label) else: cls = self._model.cluster_instance(inst) dist = self._model.distribution_for_instance(inst) cont = ClusteringContainer(inst=inst, cluster=int(cls), distribution=dist) self._output.append(Token(cont)) return None
def do_execute(self)
The actual execution of the actor. :return: None if successful, otherwise error message :rtype: str
3.727619
3.649939
1.021282
if not plot.pygraphviz_available: logger.error("Pygraphviz is not installed, cannot generate graph plot!") return if not plot.PIL_available: logger.error("PIL is not installed, cannot display graph plot!") return agraph = AGraph(graph) agraph.layout(prog='dot') if filename is None: filename = tempfile.mktemp(suffix=".png") agraph.draw(filename) image = Image.open(filename) image.show()
def plot_dot_graph(graph, filename=None)
Plots a graph in graphviz dot notation. :param graph: the dot notation graph :type graph: str :param filename: the (optional) file to save the generated plot to. The extension determines the file format. :type filename: str
2.969424
3.48731
0.851494
return javabridge.get_env().get_string(self.__stem(javabridge.get_env().new_string_utf(s)))
def stem(self, s)
Performs stemming on the string. :param s: the string to stem :type s: str :return: the stemmed string :rtype: str
5.560882
5.7279
0.970841
if y is not None: if len(x) != len(y): raise Exception("Dimensions of x and y differ: " + str(len(x)) + " != " + str(len(y))) # create header atts = [] for i in range(len(x[0])): atts.append(Attribute.create_numeric("x" + str(i+1))) if y is not None: atts.append(Attribute.create_numeric("y")) result = Instances.create_instances(name, atts, len(x)) # add data for i in range(len(x)): values = list(x[i]) if y is not None: values.append(y[i]) result.add_instance(Instance.create_instance(values)) return result
def create_instances_from_matrices(x, y=None, name="data")
Allows the generation of an Instances object from a 2-dimensional matrix for X and a 1-dimensional matrix for Y (optional). All data must be numerical. Attributes can be converted to nominal with the weka.filters.unsupervised.attribute.NumericToNominal filter. :param x: the input variables :type x: ndarray :param y: the output variable (optional) :type y: ndarray :param name: the name of the dataset :type name: str :return: the generated dataset :rtype: Instances
2.154844
2.108949
1.021762
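A minimal sketch for create_instances_from_matrices above, assuming it is importable from weka.core.dataset (an assumption) and using random numpy data:

    import numpy as np
    from weka.core.dataset import create_instances_from_matrices  # module path assumed

    x = np.random.randn(100, 3)   # input variables
    y = np.random.randn(100)      # output variable
    data = create_instances_from_matrices(x, y, name="generated")
    data.class_is_last()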
att = self.__attribute_by_name(javabridge.get_env().new_string(name)) if att is None: return None else: return Attribute(att)
def attribute_by_name(self, name)
Returns the specified attribute, None if not found. :param name: the name of the attribute :type name: str :return: the attribute or None :rtype: Attribute
5.01579
5.115084
0.980588
if index is None: self.__append_instance(inst.jobject) else: self.__insert_instance(index, inst.jobject)
def add_instance(self, inst, index=None)
Adds the specified instance to the dataset. :param inst: the Instance to add :type inst: Instance :param index: the 0-based index where to add the Instance :type index: int
4.119404
4.728401
0.871205
return Instance( self.__set_instance(index, inst.jobject))
def set_instance(self, index, inst)
Sets the Instance at the specified location in the dataset. :param index: the 0-based index of the instance to replace :type index: int :param inst: the Instance to set :type inst: Instance :return: the instance :rtype: Instance
25.897547
33.404037
0.775282
if index is None: javabridge.call(self.jobject, "delete", "()V") else: javabridge.call(self.jobject, "delete", "(I)V", index)
def delete(self, index=None)
Removes either the specified Instance or all Instance objects. :param index: the 0-based index of the instance to remove :type index: int
2.488287
2.565766
0.969803
javabridge.call(self.jobject, "insertAttributeAt", "(Lweka/core/Attribute;I)V", att.jobject, index)
def insert_attribute(self, att, index)
Inserts the attribute at the specified location. :param att: the attribute to insert :type att: Attribute :param index: the index to insert the attribute at :type index: int
4.248164
3.967657
1.070698
if random is None: return Instances( javabridge.call(self.jobject, "trainCV", "(II)Lweka/core/Instances;", num_folds, fold)) else: return Instances( javabridge.call(self.jobject, "trainCV", "(IILjava/util/Random;)Lweka/core/Instances;", num_folds, fold, random.jobject))
def train_cv(self, num_folds, fold, random=None)
Generates a training fold for cross-validation. :param num_folds: the number of folds of cross-validation, eg 10 :type num_folds: int :param fold: the current fold (0-based) :type fold: int :param random: the random number generator :type random: Random :return: the training fold :rtype: Instances
2.836482
2.446155
1.159567
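A sketch of generating all cross-validation folds with train_cv above; Random is assumed importable from weka.core.classes, and randomize/test_cv are assumed to exist as counterparts on Instances.

    from weka.core.classes import Random  # import path assumed

    folds = 10
    rnd = Random(1)
    data.randomize(rnd)                      # 'data' is an Instances object; randomize assumed available
    for fold in range(folds):
        train = data.train_cv(folds, fold, random=rnd)
        test = data.test_cv(folds, fold)     # test_cv assumed analogous to train_cv
        # ... train and evaluate on this fold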
if from_row is None or num_rows is None: return Instances( javabridge.make_instance( "weka/core/Instances", "(Lweka/core/Instances;)V", dataset.jobject)) else: dataset = cls.copy_instances(dataset) return Instances( javabridge.make_instance( "weka/core/Instances", "(Lweka/core/Instances;II)V", dataset.jobject, from_row, num_rows))
def copy_instances(cls, dataset, from_row=None, num_rows=None)
Creates a copy of the Instances. If either from_row or num_rows is None, all of the data is copied. :param dataset: the original dataset :type dataset: Instances :param from_row: the 0-based start index of the rows to copy :type from_row: int :param num_rows: the number of rows to copy :type num_rows: int :return: the copy of the data :rtype: Instances
2.641627
2.662096
0.992311
return Instances( javabridge.make_instance( "weka/core/Instances", "(Lweka/core/Instances;I)V", dataset.jobject, capacity))
def template_instances(cls, dataset, capacity=0)
Uses the Instances as template to create an empty dataset. :param dataset: the original dataset :type dataset: Instances :param capacity: how many data rows to reserve initially (see compactify) :type capacity: int :return: the empty dataset :rtype: Instances
7.037639
7.221372
0.974557
attributes = [] for att in atts: attributes.append(att.jobject) return Instances( javabridge.make_instance( "weka/core/Instances", "(Ljava/lang/String;Ljava/util/ArrayList;I)V", name, javabridge.make_list(attributes), capacity))
def create_instances(cls, name, atts, capacity)
Creates a new Instances. :param name: the relation name :type name: str :param atts: the list of attributes to use for the dataset :type atts: list of Attribute :param capacity: how many data rows to reserve initially (see compactify) :type capacity: int :return: the dataset :rtype: Instances
4.364546
4.181293
1.043827
return Instances(javabridge.static_call( "weka/core/Instances", "mergeInstances", "(Lweka/core/Instances;Lweka/core/Instances;)Lweka/core/Instances;", inst1.jobject, inst2.jobject))
def merge_instances(cls, inst1, inst2)
Merges the two datasets (side-by-side). :param inst1: the first dataset :type inst1: Instances :param inst2: the second dataset :type inst2: Instances :return: the combined dataset :rtype: Instances
3.244851
3.23283
1.003718
dataset = javabridge.call(self.jobject, "dataset", "()Lweka/core/Instances;") if dataset is None: return None else: return Instances(dataset)
def dataset(self)
Returns the dataset that this instance belongs to. :return: the dataset or None if no dataset set :rtype: Instances
3.823583
3.6591
1.044952
return self.__set_string_value(index, javabridge.get_env().new_string(s))
def set_string_value(self, index, s)
Sets the string value at the specified position (0-based). :param index: the 0-based index of the internal value :type index: int :param s: the string value :type s: str
5.744958
7.379081
0.778547
jni_classname = classname.replace(".", "/") if type(values) is list: for i in range(len(values)): values[i] = float(values[i]) values = numpy.array(values) return Instance( javabridge.make_instance( jni_classname, "(D[D)V", weight, javabridge.get_env().make_double_array(values)))
def create_instance(cls, values, classname="weka.core.DenseInstance", weight=1.0)
Creates a new instance. :param values: the float values (internal format) to use, numpy array or list. :type values: ndarray or list :param classname: the classname of the instance (eg weka.core.DenseInstance). :type classname: str :param weight: the weight of the instance :type weight: float :return: the new instance :rtype: Instance
4.115379
4.371419
0.941429
jni_classname = classname.replace(".", "/") indices = [] vals = [] for (i, v) in values: indices.append(i) vals.append(float(v)) indices = numpy.array(indices, dtype=numpy.int32) vals = numpy.array(vals) return Instance( javabridge.make_instance( jni_classname, "(D[D[II)V", weight, javabridge.get_env().make_double_array(vals), javabridge.get_env().make_int_array(indices), max_values))
def create_sparse_instance(cls, values, max_values, classname="weka.core.SparseInstance", weight=1.0)
Creates a new sparse instance. :param values: the list of tuples (0-based index and internal format float). The indices of the tuples must be in ascending order and "max_values" must be set to the maximum number of attributes in the dataset. :type values: list :param max_values: the maximum number of attributes :type max_values: int :param classname: the classname of the instance (eg weka.core.SparseInstance). :type classname: str :param weight: the weight of the instance :type weight: float :return: the new instance :rtype: Instance
3.378784
3.406919
0.991742
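A minimal sketch for create_sparse_instance above, assuming Instance is importable from weka.core.dataset; values and dimensions are illustrative.

    from weka.core.dataset import Instance  # module path assumed

    # three non-zero attributes out of ten; tuple indices must be ascending
    sparse = Instance.create_sparse_instance([(0, 1.0), (3, 2.5), (7, -1.0)], 10, weight=1.0)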
enm = javabridge.call(self.jobject, "enumerateValues", "()Ljava/util/Enumeration;") if enm is None: return None else: return typeconv.enumeration_to_list(enm)
def values(self)
Returns the labels, strings or relation-values. :return: all the values, None if not NOMINAL, STRING, or RELATION :rtype: list
6.154755
6.907508
0.891024
if short: return javabridge.static_call( "weka/core/Attribute", "typeToStringShort", "(Lweka/core/Attribute;)Ljava/lang/String;", self.jobject) else: return javabridge.static_call( "weka/core/Attribute", "typeToString", "(Lweka/core/Attribute;)Ljava/lang/String;", self.jobject)
def type_str(self, short=False)
Returns the type of the attribute as a string. :param short: whether to return the short type string :type short: bool :return: the type :rtype: str
2.136451
2.132722
1.001749
if name is None: return Attribute( javabridge.call(self.jobject, "copy", "()Ljava/lang/Object;")) else: return Attribute( javabridge.call(self.jobject, "copy", "(Ljava/lang/String;)Lweka/core/Attribute;", name))
def copy(self, name=None)
Creates a copy of this attribute. :param name: the new name, uses the old one if None :type name: str :return: the copy of the attribute :rtype: Attribute
2.86642
2.762825
1.037496
return Attribute( javabridge.make_instance( "weka/core/Attribute", "(Ljava/lang/String;Ljava/util/List;)V", name, javabridge.make_list(labels)))
def create_nominal(cls, name, labels)
Creates a nominal attribute. :param name: the name of the attribute :type name: str :param labels: the list of string labels to use :type labels: list :return: the attribute :rtype: Attribute
4.741653
5.490652
0.863586
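A sketch combining create_nominal above with create_numeric and create_instances from earlier records; the weka.core.dataset import path and the attribute/relation names are assumptions.

    from weka.core.dataset import Attribute, Instances  # module path assumed

    atts = [
        Attribute.create_numeric("temperature"),
        Attribute.create_nominal("class", ["yes", "no"]),
    ]
    data = Instances.create_instances("weather-mini", atts, 0)
    data.class_is_last()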
return Attribute( javabridge.make_instance( "weka/core/Attribute", "(Ljava/lang/String;Lweka/core/Instances;)V", name, inst.jobject))
def create_relational(cls, name, inst)
Creates a relational attribute. :param name: the name of the attribute :type name: str :param inst: the structure of the relational attribute :type inst: Instances :return: the attribute :rtype: Attribute
6.117594
7.008978
0.872823
# determine lib directory with jars rootdir = os.path.split(os.path.dirname(__file__))[0] libdir = rootdir + os.sep + "lib" # add jars from lib directory for l in glob.glob(libdir + os.sep + "*.jar"): if l.lower().find("-src.") == -1: javabridge.JARS.append(str(l))
def add_bundled_jars()
Adds the bundled jars to the JVM's classpath.
4.20117
4.292566
0.978708
if 'CLASSPATH' in os.environ: parts = os.environ['CLASSPATH'].split(os.pathsep) for part in parts: javabridge.JARS.append(part)
def add_system_classpath()
Adds the system's classpath to the JVM's classpath.
3.055835
3.057463
0.999468
global started if started is not None: logger.info("JVM already running, call jvm.stop() first") return # add user-defined jars first if class_path is not None: for cp in class_path: logger.debug("Adding user-supplied classpath=" + cp) javabridge.JARS.append(cp) if bundled: logger.debug("Adding bundled jars") add_bundled_jars() if system_cp: logger.debug("Adding system classpath") add_system_classpath() logger.debug("Classpath=" + str(javabridge.JARS)) logger.debug("MaxHeapSize=" + ("default" if (max_heap_size is None) else max_heap_size)) args = [] weka_home = None if packages is not None: if isinstance(packages, bool): if packages: logger.debug("Package support enabled") else: logger.debug("Package support disabled") args.append("-Dweka.packageManager.loadPackages=false") if isinstance(packages, str): if os.path.exists(packages) and os.path.isdir(packages): logger.debug("Using alternative Weka home directory: " + packages) weka_home = packages else: logger.warning("Invalid Weka home: " + packages) javabridge.start_vm(args=args, run_headless=True, max_heap_size=max_heap_size) javabridge.attach() started = True if weka_home is not None: from weka.core.classes import Environment env = Environment.system_wide() logger.debug("Using alternative Weka home directory: " + packages) env.add_variable("WEKA_HOME", weka_home) # initialize package manager javabridge.static_call( "Lweka/core/WekaPackageManager;", "loadPackages", "(Z)V", False)
def start(class_path=None, bundled=True, packages=False, system_cp=False, max_heap_size=None)
Initializes the javabridge connection (starts up the JVM). :param class_path: the additional classpath elements to add :type class_path: list :param bundled: whether to add jars from the "lib" directory :type bundled: bool :param packages: whether to add jars from Weka packages as well (bool) or an alternative Weka home directory (str) :type packages: bool or str :param system_cp: whether to add the system classpath as well :type system_cp: bool :param max_heap_size: the maximum heap size (-Xmx parameter, eg 512m or 4g) :type max_heap_size: str
2.878108
2.760155
1.042734
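A typical start/stop sketch for the function above, using only parameters shown in its signature; the heap size is illustrative.

    import weka.core.jvm as jvm

    # bundled jars plus Weka package support and a larger heap
    jvm.start(packages=True, max_heap_size="512m")
    try:
        pass  # work with the wrapper here
    finally:
        jvm.stop()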
parts = classname.split('.') module = ".".join(parts[:-1]) m = __import__(module) for comp in parts[1:]: m = getattr(m, comp) return m
def get_class(classname)
Returns the class object associated with the dot-notation classname. Taken from here: http://stackoverflow.com/a/452981 :param classname: the classname :type classname: str :return: the class object :rtype: object
1.638995
1.884149
0.869886
try: return javabridge.class_for_name(classname) except: return javabridge.static_call( "Lweka/core/ClassHelper;", "forName", "(Ljava/lang/Class;Ljava/lang/String;)Ljava/lang/Class;", javabridge.class_for_name("java.lang.Object"), classname)
def get_jclass(classname)
Returns the Java class object associated with the dot-notation classname. :param classname: the classname :type classname: str :return: the class object :rtype: JB_Object
3.151213
3.388691
0.92992
try: return javabridge.get_static_field(classname, fieldname, signature) except: return javabridge.static_call( "Lweka/core/ClassHelper;", "getStaticField", "(Ljava/lang/String;Ljava/lang/String;)Ljava/lang/Object;", classname, fieldname)
def get_static_field(classname, fieldname, signature)
Returns the Java object associated with the static field of the specified class. :param classname: the classname of the class to get the field from :type classname: str :param fieldname: the name of the field to retrieve :type fieldname: str :param signature: the JNI signature of the field :type signature: str :return: the object :rtype: JB_Object
2.620389
3.030438
0.86469
if isinstance(obj, javabridge.JB_Object): cls = javabridge.call(obj, "getClass", "()Ljava/lang/Class;") return javabridge.call(cls, "getName", "()Ljava/lang/String;") elif inspect.isclass(obj): return obj.__module__ + "." + obj.__name__ else: return get_classname(obj.__class__)
def get_classname(obj)
Returns the classname of the JB_Object, Python class or object. :param obj: the java object or Python class/object to get the classname for :type obj: object :return: the classname :rtype: str
2.171626
2.167011
1.00213
class_or_intf_name = class_or_intf_name.replace("/", ".") classname = get_classname(obj) # array? retrieve component type and check that if is_array(obj): jarray = JavaArray(jobject=obj) classname = jarray.component_type() result = javabridge.static_call( "Lweka/core/InheritanceUtils;", "isSubclass", "(Ljava/lang/String;Ljava/lang/String;)Z", class_or_intf_name, classname) if result: return True return javabridge.static_call( "Lweka/core/InheritanceUtils;", "hasInterface", "(Ljava/lang/String;Ljava/lang/String;)Z", class_or_intf_name, classname)
def is_instance_of(obj, class_or_intf_name)
Checks whether the Java object implements the specified interface or is a subclass of the superclass. :param obj: the Java object to check :type obj: JB_Object :param class_or_intf_name: the superclass or interface to check, dot notation or with forward slashes :type class_or_intf_name: str :return: True if the object implements the interface or is a subclass of the superclass :rtype: bool
3.160551
3.073107
1.028455
params = split_options(cmdline) cls = params[0] params = params[1:] handler = OptionHandler(javabridge.static_call( "Lweka/core/Utils;", "forName", "(Ljava/lang/Class;Ljava/lang/String;[Ljava/lang/String;)Ljava/lang/Object;", javabridge.class_for_name("java.lang.Object"), cls, params)) if classname is None: return handler else: c = get_class(classname) return c(jobject=handler.jobject)
def from_commandline(cmdline, classname=None)
Creates an OptionHandler based on the provided commandline string. :param cmdline: the commandline string to use :type cmdline: str :param classname: the classname of the wrapper to return other than OptionHandler (in dot-notation) :type classname: str :return: the generated option handler instance :rtype: object
4.123589
4.182929
0.985814
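A usage sketch for from_commandline above; the import path (weka.core.classes), the J48 command line and the wrapper classname are illustrative assumptions.

    from weka.core.classes import from_commandline  # import path assumed

    cmdline = "weka.classifiers.trees.J48 -C 0.3"
    cls = from_commandline(cmdline, classname="weka.classifiers.Classifier")
    print(cls.to_commandline())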
result = javabridge.get_collection_wrapper( javabridge.static_call( "Lweka/Run;", "findSchemeMatch", "(Ljava/lang/String;Z)Ljava/util/List;", classname, True)) if len(result) == 1: return str(result[0]) elif len(result) == 0: raise Exception("No classname matches found for: " + classname) else: matches = [] for i in range(len(result)): matches.append(str(result[i])) raise Exception("Found multiple matches for '" + classname + "':\n" + '\n'.join(matches))
def complete_classname(classname)
Attempts to complete a partial classname like '.J48' and returns the full classname if a single match was found, otherwise an exception is raised. :param classname: the partial classname to expand :type classname: str :return: the full classname :rtype: str
3.524683
3.486715
1.010889
parser = argparse.ArgumentParser( description='Performs option handling operations from the command-line. Calls JVM start/stop automatically.') parser.add_argument("-j", metavar="classpath", dest="classpath", help="additional classpath, jars/directories") parser.add_argument("-action", metavar="action", dest="action", required=True, help="The action to perform on the options: join=create single string, "\ "split=create array from quoted option, code=generate code from options") parser.add_argument("option", nargs=argparse.REMAINDER, help="The option(s) to process") parsed = parser.parse_args() jars = [] if parsed.classpath is not None: jars = parsed.classpath.split(os.pathsep) jvm.start(jars, packages=True) try: if parsed.action == "join": output = "cmdline = \"" + backquote(join_options(parsed.option)) + "\"" elif parsed.action == "split" and len(parsed.option) == 1: output = "options = [\n" opts = split_options(parsed.option[0]) for idx, opt in enumerate(opts): if idx > 0: output += ",\n" output += " \"" + backquote(opt) + "\"" output += "]" elif parsed.action == "code": options = parsed.option[:] cname = None # classname + options? if options[0].find(".") > -1: cname = options[0] options = options[1:] output = "options = [ \n" for idx, opt in enumerate(options): if idx > 0: output += ",\n" output += " \"" + backquote(opt) + "\"" output += "]\n" if cname is not None: output += 'handler = OptionHandler(JavaObject.new_instance("' + cname + '"))\n' output += 'handler.options = options\n' else: raise Exception("Unsupported action: " + parsed.action) if output is not None: print(output) except Exception as e: print(e) finally: jvm.stop()
def main()
Performs option handling operations from the command-line. Calls JVM start/stop automatically. Use -h to see all options.
3.145871
2.939903
1.07006
return json.dumps(self.to_dict(), sort_keys=True, indent=2, separators=(',', ': '))
def to_json(self)
Returns the options as JSON. :return: the object as string :rtype: str
2.689218
3.063118
0.877935
d = json.loads(s) return get_dict_handler(d["type"])(d)
def from_json(cls, s)
Restores the object from the given JSON. :param s: the JSON string to parse :type s: str :return: the restored object
9.717994
19.791954
0.491007
result = {} result["type"] = "Configurable" result["class"] = get_classname(self) result["config"] = {} for k in self._config: v = self._config[k] if isinstance(v, JSONObject): result["config"][k] = v.to_dict() else: result["config"][k] = v return result
def to_dict(self)
Returns a dictionary that represents this object, to be used for JSONification. :return: the object dictionary :rtype: dict
2.890927
2.825859
1.023026
conf = {} for k in d["config"]: v = d["config"][k] if isinstance(v, dict): conf[str(k)] = get_dict_handler(d["config"]["type"])(v) else: conf[str(k)] = v return get_class(str(d["class"]))(config=conf)
def from_dict(cls, d)
Restores its state from a dictionary, used in de-JSONification. :param d: the object dictionary :type d: dict
3.989102
4.143539
0.962728
if self._logger is None: self._logger = self.new_logger() return self._logger
def logger(self)
Returns the logger object. :return: the logger :rtype: logger
3.930452
4.33371
0.906949
result = [] result.append(self.__class__.__name__) result.append(re.sub(r'.', '=', self.__class__.__name__)) result.append("") result.append("DESCRIPTION") result.append(self.description()) result.append("") result.append("OPTIONS") opts = sorted(self.config.keys()) for opt in opts: result.append(opt) helpstr = self.help[opt] if helpstr is None: helpstr = "-missing help-" result.append("\t" + helpstr) result.append("") return '\n'.join(result)
def generate_help(self)
Generates a help string for this actor. :return: the help string :rtype: str
2.751572
2.784801
0.988068
cls = javabridge.call(self.jobject, "getClass", "()Ljava/lang/Class;") return javabridge.call(cls, "getName", "()Ljava/lang/String;")
def classname(self)
Returns the Java classname in dot-notation. :return: the Java classname :rtype: str
3.378295
3.169067
1.066022
# unwrap? if isinstance(jobject, JavaObject): jobject = jobject.jobject javabridge.static_call( "Lweka/core/PropertyPath;", "setValue", "(Ljava/lang/Object;Ljava/lang/String;Ljava/lang/Object;)V", self.jobject, path, jobject)
def set_property(self, path, jobject)
Attempts to set the value (jobject, a Java object) of the provided (bean) property path. :param path: the property path, e.g., "filter" for a setFilter(...)/getFilter() method pair :type path: str :param jobject: the Java object to set; if instance of JavaObject class, the jobject member is automatically used :type jobject: JB_Object
4.117914
3.871305
1.063702
if not cls.check_type(jobject, intf_or_class): raise TypeError("Object does not implement or subclass " + intf_or_class + ": " + get_classname(jobject))
def enforce_type(cls, jobject, intf_or_class)
Raises an exception if the object does not implement the specified interface or is not a subclass. :param jobject: the Java object to check :type jobject: JB_Object :param intf_or_class: the classname in Java notation (eg "weka.core.DenseInstance") :type intf_or_class: str
3.864321
3.178762
1.215669
try: return javabridge.static_call( "Lweka/core/Utils;", "forName", "(Ljava/lang/Class;Ljava/lang/String;[Ljava/lang/String;)Ljava/lang/Object;", javabridge.class_for_name("java.lang.Object"), classname, []) except JavaException as e: print("Failed to instantiate " + classname + ": " + str(e)) return None
def new_instance(cls, classname)
Creates a new object from the given classname using the default constructor, None in case of error. :param classname: the classname in Java notation (eg "weka.core.DenseInstance") :type classname: str :return: the Java object :rtype: JB_Object
3.231758
3.058582
1.05662
if system_wide: javabridge.call(self.jobject, "addVariableSystemWide", "(Ljava/lang/String;Ljava/lang/String;)V", key, value) else: javabridge.call(self.jobject, "addVariable", "(Ljava/lang/String;Ljava/lang/String;)V", key, value)
def add_variable(self, key, value, system_wide=False)
Adds the environment variable. :param key: the name of the variable :type key: str :param value: the value :type value: str :param system_wide: whether to add the variable system wide :type system_wide: bool
1.917858
1.879204
1.020569
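A short sketch for add_variable above, using the system-wide Environment shown in the jvm.start record; the variable names and values are hypothetical.

    from weka.core.classes import Environment

    env = Environment.system_wide()
    env.add_variable("MY_HOME", "/some/path")               # hypothetical variable
    env.add_variable("MY_GLOBAL", "42", system_wide=True)   # also set system wide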