
Module fri.model

View Source
from .classification import Classification
from .lupi_classification import LUPI_Classification
from .lupi_ordinal_regression import LUPI_OrdinalRegression
from .lupi_regression import LUPI_Regression
from .ordinal_regression import OrdinalRegression
from .regression import Regression

__all__ = [
    "Classification",
    "Regression",
    "OrdinalRegression",
    "LUPI_Classification",
    "LUPI_Regression",
    "LUPI_OrdinalRegression",
]
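
All problem types share the ProblemType interface; for example, the parameters() classmethod lists each type's tunable hyperparameters. A minimal usage sketch (assuming the fri package is installed):

    from fri.model import Classification, Regression, LUPI_Classification

    # Each problem type reports its tunable hyperparameters.
    print(Classification.parameters())       # ['C']
    print(Regression.parameters())           # ['C', 'epsilon']
    print(LUPI_Classification.parameters())  # ['C', 'scaling_lupi_w', 'scaling_lupi_loss']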

Sub-modules

  • fri.model.base_type
  • fri.model.classification
  • fri.model.lupi_classification
  • fri.model.lupi_ordinal_regression
  • fri.model.lupi_regression
  • fri.model.ordinal_regression
  • fri.model.regression

Classes

Classification

class Classification(
    **kwargs
)

Problem type for binary classification, based on an SVM model.

View Source
class Classification(ProblemType):

    @classmethod
    def parameters(cls):
        return ["C"]

    @property
    def get_initmodel_template(cls):
        return Classification_SVM

    @property
    def get_cvxproblem_template(cls):
        return Classification_Relevance_Bound

    def relax_factors(cls):
        return ["loss_slack", "w_l1_slack"]

    def preprocessing(self, data, **kwargs):
        X, y = data
        # Check that X and y have correct shape
        X, y = check_X_y(X, y)
        # Store the classes seen during fit
        classes_ = unique_labels(y)
        if len(classes_) > 2:
            raise ValueError("Only binary class data supported")
        # Negative class is set to -1 for decision surface
        y = preprocessing.LabelEncoder().fit_transform(y)
        y[y == 0] = -1
        return X, y
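
The label handling above reduces to scikit-learn's LabelEncoder followed by mapping class 0 to -1. A standalone sketch of just this encoding step:

    import numpy as np
    from sklearn import preprocessing

    y = np.array(["neg", "pos", "neg", "pos"])
    y = preprocessing.LabelEncoder().fit_transform(y)  # encodes labels to {0, 1}
    y[y == 0] = -1                                     # negative class becomes -1
    print(y)  # [-1  1 -1  1]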

Ancestors (in MRO)

  • fri.model.base_type.ProblemType
  • abc.ABC

Static methods

parameters

def parameters()

View Source

    @classmethod
    def parameters(cls):
        return ["C"]

Instance variables

get_cvxproblem_template
get_initmodel_template

Methods

get_all_parameters

def get_all_parameters(
    self
)

View Source

    def get_all_parameters(self):
        return {p: self.get_chosen_parameter(p) for p in self.parameters()}

get_all_relax_factors

def get_all_relax_factors(
    self
)

View Source

    def get_all_relax_factors(self):
        return {p: self.get_chosen_relax_factors(p) for p in self.relax_factors()}
get_chosen_parameter

def get_chosen_parameter(
    self,
    p
)

View Source

    def get_chosen_parameter(self, p):
        try:
            return [
                self.chosen_parameters_[p]
            ]  # We return a list for the parameter search function
        except (KeyError, AttributeError):
            # TODO: rewrite the parameter logic
            # TODO: move this to a subclass
            if p == "scaling_lupi_w":
                # return [0.1, 1, 10, 100, 1000]
                return scipy.stats.reciprocal(a=1e-15, b=1e10)
            # if p == "scaling_lupi_loss":
            #     # a value 0 < p < 1 yields the standard SVM solution,
            #     # p > 1 encourages usage of the LUPI function
            #     return scipy.stats.reciprocal(a=1e-15, b=1e15)
            if p == "C":
                return scipy.stats.reciprocal(a=1e-5, b=1e5)
            if p == "epsilon":
                return [0, 0.001, 0.01, 0.1, 1, 10, 100]
            else:
                return scipy.stats.reciprocal(a=1e-10, b=1e10)
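
The scipy.stats.reciprocal objects returned above are frozen log-uniform distributions, so a randomized parameter search can draw candidate values directly from them. A minimal sketch:

    import scipy.stats

    # Log-uniform prior over C, matching the fallback above.
    C_prior = scipy.stats.reciprocal(a=1e-5, b=1e5)
    candidates = C_prior.rvs(size=3, random_state=0)
    print(candidates)  # three log-uniformly drawn values in [1e-5, 1e5]
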
get_chosen_relax_factors

def get_chosen_relax_factors(
    self,
    p
)

View Source

    def get_chosen_relax_factors(self, p):
        try:
            factor = self.relax_factors_[p]
        except KeyError:
            try:
                factor = self.relax_factors_[p + "_slack"]
            except KeyError:
                factor = 0.1
        if factor < 0:
            raise ValueError("Slack factor multiplier must be non-negative!")
        return factor
get_relaxed_constraints

def get_relaxed_constraints(
    self,
    constraints
)

View Source

    def get_relaxed_constraints(self, constraints):
        return {c: self.relax_constraint(c, v) for c, v in constraints.items()}

postprocessing

def postprocessing(
    self,
    bounds
)

View Source

    def postprocessing(self, bounds):
        return bounds

preprocessing

def preprocessing(
    self,
    data,
    **kwargs
)

View Source

    def preprocessing(self, data, **kwargs):
        X, y = data
        # Check that X and y have correct shape
        X, y = check_X_y(X, y)
        # Store the classes seen during fit
        classes_ = unique_labels(y)
        if len(classes_) > 2:
            raise ValueError("Only binary class data supported")
        # Negative class is set to -1 for decision surface
        y = preprocessing.LabelEncoder().fit_transform(y)
        y[y == 0] = -1
        return X, y

relax_constraint

def relax_constraint(
    self,
    key,
    value
)

View Source

    def relax_constraint(self, key, value):
        return value * (1 + self.get_chosen_relax_factors(key))
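
relax_constraint widens a constraint multiplicatively: with the default slack factor of 0.1, a bound of 2.5 relaxes to 2.5 * (1 + 0.1) = 2.75. A standalone sketch of the rule:

    # Standalone sketch of the relaxation rule used by relax_constraint.
    factor = 0.1    # default slack factor from get_chosen_relax_factors
    value = 2.5     # hypothetical constraint value
    relaxed = value * (1 + factor)
    print(relaxed)  # 2.75
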
relax_factors

def relax_factors(
    cls
)

View Source

    def relax_factors(cls):
        return ["loss_slack", "w_l1_slack"]

LUPI_Classification

class LUPI_Classification(
    **kwargs
)

Problem type for binary classification with privileged information (LUPI).

View Source
class LUPI_Classification(ProblemType):

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self._lupi_features = None

    @property
    def lupi_features(self):
        return self._lupi_features

    @classmethod
    def parameters(cls):
        return ["C", "scaling_lupi_w", "scaling_lupi_loss"]

    @property
    def get_initmodel_template(cls):
        return LUPI_Classification_SVM

    @property
    def get_cvxproblem_template(cls):
        return LUPI_Classification_Relevance_Bound

    def relax_factors(cls):
        return ["loss_slack", "w_l1_slack"]

    def preprocessing(self, data, lupi_features=None):
        X, y = data
        d = X.shape[1]
        if lupi_features is None:
            raise ValueError("Argument 'lupi_features' missing in fit() call.")
        if not isinstance(lupi_features, int):
            raise ValueError("Argument 'lupi_features' is not of type int.")
        if not 0 < lupi_features < d:
            raise ValueError(
                "Argument 'lupi_features' looks wrong. We need at least one privileged feature (>0) and at least one normal feature."
            )
        self._lupi_features = lupi_features
        # Check that X and y have correct shape
        X, y = check_X_y(X, y)
        # Store the classes seen during fit
        classes_ = unique_labels(y)
        if len(classes_) > 2:
            raise ValueError("Only binary class data supported")
        # Negative class is set to -1 for decision surface
        y = preprocessing.LabelEncoder().fit_transform(y)
        y[y == 0] = -1
        return X, y
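
The lupi_features argument must be an int strictly between 0 and the number of feature columns, so that both privileged and normal features remain. A small sketch of this contract (values are illustrative):

    import numpy as np

    X = np.random.rand(10, 5)  # 10 samples, 5 feature columns in total
    lupi_features = 2          # count of privileged feature columns
    d = X.shape[1]
    assert isinstance(lupi_features, int) and 0 < lupi_features < d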

Ancestors (in MRO)

  • fri.model.base_type.ProblemType
  • abc.ABC

Static methods

parameters

def parameters()

View Source

    @classmethod
    def parameters(cls):
        return ["C", "scaling_lupi_w", "scaling_lupi_loss"]

Instance variables

get_cvxproblem_template
get_initmodel_template
lupi_features

Methods

get_all_parameters

def get_all_parameters(
    self
)

View Source

    def get_all_parameters(self):
        return {p: self.get_chosen_parameter(p) for p in self.parameters()}

get_all_relax_factors

def get_all_relax_factors(
    self
)

View Source

    def get_all_relax_factors(self):
        return {p: self.get_chosen_relax_factors(p) for p in self.relax_factors()}

get_chosen_parameter

def get_chosen_parameter(
    self,
    p
)

View Source

    def get_chosen_parameter(self, p):
        try:
            return [
                self.chosen_parameters_[p]
            ]  # We return a list for the parameter search function
        except (KeyError, AttributeError):
            # TODO: rewrite the parameter logic
            # TODO: move this to a subclass
            if p == "scaling_lupi_w":
                # return [0.1, 1, 10, 100, 1000]
                return scipy.stats.reciprocal(a=1e-15, b=1e10)
            # if p == "scaling_lupi_loss":
            #     # a value 0 < p < 1 yields the standard SVM solution,
            #     # p > 1 encourages usage of the LUPI function
            #     return scipy.stats.reciprocal(a=1e-15, b=1e15)
            if p == "C":
                return scipy.stats.reciprocal(a=1e-5, b=1e5)
            if p == "epsilon":
                return [0, 0.001, 0.01, 0.1, 1, 10, 100]
            else:
                return scipy.stats.reciprocal(a=1e-10, b=1e10)

get_chosen_relax_factors

def get_chosen_relax_factors(
    self,
    p
)

View Source

    def get_chosen_relax_factors(self, p):
        try:
            factor = self.relax_factors_[p]
        except KeyError:
            try:
                factor = self.relax_factors_[p + "_slack"]
            except KeyError:
                factor = 0.1
        if factor < 0:
            raise ValueError("Slack factor multiplier must be non-negative!")
        return factor

get_relaxed_constraints

def get_relaxed_constraints(
    self,
    constraints
)

View Source

    def get_relaxed_constraints(self, constraints):
        return {c: self.relax_constraint(c, v) for c, v in constraints.items()}

postprocessing

def postprocessing(
    self,
    bounds
)

View Source

    def postprocessing(self, bounds):
        return bounds

preprocessing

def preprocessing(
    self,
    data,
    lupi_features=None
)

View Source

    def preprocessing(self, data, lupi_features=None):
        X, y = data
        d = X.shape[1]
        if lupi_features is None:
            raise ValueError("Argument 'lupi_features' missing in fit() call.")
        if not isinstance(lupi_features, int):
            raise ValueError("Argument 'lupi_features' is not of type int.")
        if not 0 < lupi_features < d:
            raise ValueError(
                "Argument 'lupi_features' looks wrong. We need at least one privileged feature (>0) and at least one normal feature."
            )
        self._lupi_features = lupi_features
        # Check that X and y have correct shape
        X, y = check_X_y(X, y)
        # Store the classes seen during fit
        classes_ = unique_labels(y)
        if len(classes_) > 2:
            raise ValueError("Only binary class data supported")
        # Negative class is set to -1 for decision surface
        y = preprocessing.LabelEncoder().fit_transform(y)
        y[y == 0] = -1
        return X, y

relax_constraint

def relax_constraint(
    self,
    key,
    value
)

View Source

    def relax_constraint(self, key, value):
        return value * (1 + self.get_chosen_relax_factors(key))

relax_factors

def relax_factors(
    cls
)

View Source

    def relax_factors(cls):
        return ["loss_slack", "w_l1_slack"]

LUPI_OrdinalRegression

class LUPI_OrdinalRegression(
    **kwargs
)

Problem type for ordinal regression with privileged information (LUPI).

View Source
class LUPI_OrdinalRegression(ProblemType):

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self._lupi_features = None

    @property
    def lupi_features(self):
        return self._lupi_features

    @classmethod
    def parameters(cls):
        return ["C", "scaling_lupi_w"]

    @property
    def get_initmodel_template(cls):
        return LUPI_OrdinalRegression_SVM

    @property
    def get_cvxproblem_template(cls):
        return LUPI_OrdinalRegression_Relevance_Bound

    def relax_factors(cls):
        return ["loss_slack", "w_l1_slack"]

    def preprocessing(self, data, lupi_features=None):
        X, y = data
        d = X.shape[1]
        if lupi_features is None:
            raise ValueError("Argument 'lupi_features' missing in fit() call.")
        if not isinstance(lupi_features, int):
            raise ValueError("Argument 'lupi_features' is not of type int.")
        if not 0 < lupi_features < d:
            raise ValueError(
                "Argument 'lupi_features' looks wrong. We need at least one privileged feature (>0) and at least one normal feature."
            )
        self._lupi_features = lupi_features
        # Check that X and y have correct shape
        X, y = check_X_y(X, y)
        if np.min(y) > 0:
            print("First ordinal class has index > 0. Shifting index...")
            y = y - np.min(y)
        return X, y
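
Ordinal targets are shifted so that the lowest class index is 0. A standalone sketch of the shift:

    import numpy as np

    y = np.array([2, 3, 2, 5, 4])
    if np.min(y) > 0:
        y = y - np.min(y)  # shift so the first ordinal class is 0
    print(y)  # [0 1 0 3 2]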

Ancestors (in MRO)

  • fri.model.base_type.ProblemType
  • abc.ABC

Static methods

parameters

def parameters()

View Source

    @classmethod
    def parameters(cls):
        return ["C", "scaling_lupi_w"]

Instance variables

get_cvxproblem_template
get_initmodel_template
lupi_features

Methods

get_all_parameters

def get_all_parameters(
    self
)

View Source

    def get_all_parameters(self):
        return {p: self.get_chosen_parameter(p) for p in self.parameters()}

get_all_relax_factors

def get_all_relax_factors(
    self
)

View Source

    def get_all_relax_factors(self):
        return {p: self.get_chosen_relax_factors(p) for p in self.relax_factors()}

get_chosen_parameter

def get_chosen_parameter(
    self,
    p
)

View Source

    def get_chosen_parameter(self, p):
        try:
            return [
                self.chosen_parameters_[p]
            ]  # We return a list for the parameter search function
        except (KeyError, AttributeError):
            # TODO: rewrite the parameter logic
            # TODO: move this to a subclass
            if p == "scaling_lupi_w":
                # return [0.1, 1, 10, 100, 1000]
                return scipy.stats.reciprocal(a=1e-15, b=1e10)
            # if p == "scaling_lupi_loss":
            #     # a value 0 < p < 1 yields the standard SVM solution,
            #     # p > 1 encourages usage of the LUPI function
            #     return scipy.stats.reciprocal(a=1e-15, b=1e15)
            if p == "C":
                return scipy.stats.reciprocal(a=1e-5, b=1e5)
            if p == "epsilon":
                return [0, 0.001, 0.01, 0.1, 1, 10, 100]
            else:
                return scipy.stats.reciprocal(a=1e-10, b=1e10)

get_chosen_relax_factors

def get_chosen_relax_factors(
    self,
    p
)

View Source

    def get_chosen_relax_factors(self, p):
        try:
            factor = self.relax_factors_[p]
        except KeyError:
            try:
                factor = self.relax_factors_[p + "_slack"]
            except KeyError:
                factor = 0.1
        if factor < 0:
            raise ValueError("Slack factor multiplier must be non-negative!")
        return factor

get_relaxed_constraints

def get_relaxed_constraints(
    self,
    constraints
)

View Source

    def get_relaxed_constraints(self, constraints):
        return {c: self.relax_constraint(c, v) for c, v in constraints.items()}

postprocessing

def postprocessing(
    self,
    bounds
)

View Source

    def postprocessing(self, bounds):
        return bounds

preprocessing

def preprocessing(
    self,
    data,
    lupi_features=None
)

View Source

    def preprocessing(self, data, lupi_features=None):
        X, y = data
        d = X.shape[1]
        if lupi_features is None:
            raise ValueError("Argument 'lupi_features' missing in fit() call.")
        if not isinstance(lupi_features, int):
            raise ValueError("Argument 'lupi_features' is not of type int.")
        if not 0 < lupi_features < d:
            raise ValueError(
                "Argument 'lupi_features' looks wrong. We need at least one privileged feature (>0) and at least one normal feature."
            )
        self._lupi_features = lupi_features
        # Check that X and y have correct shape
        X, y = check_X_y(X, y)
        if np.min(y) > 0:
            print("First ordinal class has index > 0. Shifting index...")
            y = y - np.min(y)
        return X, y

relax_constraint

def relax_constraint(
    self,
    key,
    value
)

View Source

    def relax_constraint(self, key, value):
        return value * (1 + self.get_chosen_relax_factors(key))

relax_factors

def relax_factors(
    cls
)

View Source

    def relax_factors(cls):
        return ["loss_slack", "w_l1_slack"]

LUPI_Regression

class LUPI_Regression(
    **kwargs
)

Problem type for regression with privileged information (LUPI).

View Source
class LUPI_Regression(ProblemType):

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self._lupi_features = None

    @property
    def lupi_features(self):
        return self._lupi_features

    @classmethod
    def parameters(cls):
        return ["C", "epsilon", "scaling_lupi_w", "scaling_lupi_loss"]

    @property
    def get_initmodel_template(cls):
        return LUPI_Regression_SVM

    @property
    def get_cvxproblem_template(cls):
        return LUPI_Regression_Relevance_Bound

    def relax_factors(cls):
        return ["loss_slack", "w_l1_slack"]

    def preprocessing(self, data, lupi_features=None):
        X, y = data
        d = X.shape[1]
        if lupi_features is None:
            raise ValueError("Argument 'lupi_features' missing in fit() call.")
        if not isinstance(lupi_features, int):
            raise ValueError("Argument 'lupi_features' is not of type int.")
        if not 0 < lupi_features < d:
            raise ValueError(
                "Argument 'lupi_features' looks wrong. We need at least one privileged feature (>0) and at least one normal feature."
            )
        self._lupi_features = lupi_features
        # Check that X and y have correct shape
        X, y = check_X_y(X, y)
        return X, y

Ancestors (in MRO)

  • fri.model.base_type.ProblemType
  • abc.ABC

Static methods

parameters

def parameters()

View Source

    @classmethod
    def parameters(cls):
        return ["C", "epsilon", "scaling_lupi_w", "scaling_lupi_loss"]

Instance variables

get_cvxproblem_template
get_initmodel_template
lupi_features

Methods

get_all_parameters

def get_all_parameters(
    self
)

View Source

    def get_all_parameters(self):
        return {p: self.get_chosen_parameter(p) for p in self.parameters()}

get_all_relax_factors

def get_all_relax_factors(
    self
)

View Source

    def get_all_relax_factors(self):
        return {p: self.get_chosen_relax_factors(p) for p in self.relax_factors()}

get_chosen_parameter

def get_chosen_parameter(
    self,
    p
)

View Source

    def get_chosen_parameter(self, p):
        try:
            return [
                self.chosen_parameters_[p]
            ]  # We return a list for the parameter search function
        except (KeyError, AttributeError):
            # TODO: rewrite the parameter logic
            # TODO: move this to a subclass
            if p == "scaling_lupi_w":
                # return [0.1, 1, 10, 100, 1000]
                return scipy.stats.reciprocal(a=1e-15, b=1e10)
            # if p == "scaling_lupi_loss":
            #     # a value 0 < p < 1 yields the standard SVM solution,
            #     # p > 1 encourages usage of the LUPI function
            #     return scipy.stats.reciprocal(a=1e-15, b=1e15)
            if p == "C":
                return scipy.stats.reciprocal(a=1e-5, b=1e5)
            if p == "epsilon":
                return [0, 0.001, 0.01, 0.1, 1, 10, 100]
            else:
                return scipy.stats.reciprocal(a=1e-10, b=1e10)

get_chosen_relax_factors

def get_chosen_relax_factors(
    self,
    p
)

View Source

    def get_chosen_relax_factors(self, p):
        try:
            factor = self.relax_factors_[p]
        except KeyError:
            try:
                factor = self.relax_factors_[p + "_slack"]
            except KeyError:
                factor = 0.1
        if factor < 0:
            raise ValueError("Slack factor multiplier must be non-negative!")
        return factor

get_relaxed_constraints

def get_relaxed_constraints(
    self,
    constraints
)

View Source

    def get_relaxed_constraints(self, constraints):
        return {c: self.relax_constraint(c, v) for c, v in constraints.items()}

postprocessing

def postprocessing(
    self,
    bounds
)

View Source

    def postprocessing(self, bounds):
        return bounds

preprocessing

def preprocessing(
    self,
    data,
    lupi_features=None
)

View Source

    def preprocessing(self, data, lupi_features=None):
        X, y = data
        d = X.shape[1]
        if lupi_features is None:
            raise ValueError("Argument 'lupi_features' missing in fit() call.")
        if not isinstance(lupi_features, int):
            raise ValueError("Argument 'lupi_features' is not of type int.")
        if not 0 < lupi_features < d:
            raise ValueError(
                "Argument 'lupi_features' looks wrong. We need at least one privileged feature (>0) and at least one normal feature."
            )
        self._lupi_features = lupi_features
        # Check that X and y have correct shape
        X, y = check_X_y(X, y)
        return X, y

relax_constraint

def relax_constraint(
    self,
    key,
    value
)

View Source

    def relax_constraint(self, key, value):
        return value * (1 + self.get_chosen_relax_factors(key))

relax_factors

def relax_factors(
    cls
)

View Source

    def relax_factors(cls):
        return ["loss_slack", "w_l1_slack"]

OrdinalRegression

class OrdinalRegression(
    **kwargs
)

Problem type for ordinal regression, based on an ordinal SVM model.

View Source
class OrdinalRegression(ProblemType):

    @classmethod
    def parameters(cls):
        return ["C"]

    @property
    def get_initmodel_template(cls):
        return OrdinalRegression_SVM

    @property
    def get_cvxproblem_template(cls):
        return OrdinalRegression_Relevance_Bound

    def relax_factors(cls):
        return ["loss_slack", "w_l1_slack"]

    def preprocessing(self, data, **kwargs):
        X, y = data
        # Check that X and y have correct shape
        X, y = check_X_y(X, y)
        if np.min(y) > 0:
            print("First ordinal class has index > 0. Shifting index...")
            y = y - np.min(y)
        return X, y

Ancestors (in MRO)

  • fri.model.base_type.ProblemType
  • abc.ABC

Static methods

parameters

def parameters()

View Source

    @classmethod
    def parameters(cls):
        return ["C"]

Instance variables

get_cvxproblem_template
get_initmodel_template

Methods

get_all_parameters

def get_all_parameters(
    self
)

View Source

    def get_all_parameters(self):
        return {p: self.get_chosen_parameter(p) for p in self.parameters()}

get_all_relax_factors

def get_all_relax_factors(
    self
)

View Source

    def get_all_relax_factors(self):
        return {p: self.get_chosen_relax_factors(p) for p in self.relax_factors()}

get_chosen_parameter

def get_chosen_parameter(
    self,
    p
)

View Source

    def get_chosen_parameter(self, p):
        try:
            return [
                self.chosen_parameters_[p]
            ]  # We return a list for the parameter search function
        except (KeyError, AttributeError):
            # TODO: rewrite the parameter logic
            # TODO: move this to a subclass
            if p == "scaling_lupi_w":
                # return [0.1, 1, 10, 100, 1000]
                return scipy.stats.reciprocal(a=1e-15, b=1e10)
            # if p == "scaling_lupi_loss":
            #     # a value 0 < p < 1 yields the standard SVM solution,
            #     # p > 1 encourages usage of the LUPI function
            #     return scipy.stats.reciprocal(a=1e-15, b=1e15)
            if p == "C":
                return scipy.stats.reciprocal(a=1e-5, b=1e5)
            if p == "epsilon":
                return [0, 0.001, 0.01, 0.1, 1, 10, 100]
            else:
                return scipy.stats.reciprocal(a=1e-10, b=1e10)

get_chosen_relax_factors

def get_chosen_relax_factors(
    self,
    p
)

View Source

    def get_chosen_relax_factors(self, p):
        try:
            factor = self.relax_factors_[p]
        except KeyError:
            try:
                factor = self.relax_factors_[p + "_slack"]
            except KeyError:
                factor = 0.1
        if factor < 0:
            raise ValueError("Slack factor multiplier must be non-negative!")
        return factor

get_relaxed_constraints

def get_relaxed_constraints(
    self,
    constraints
)

View Source

    def get_relaxed_constraints(self, constraints):
        return {c: self.relax_constraint(c, v) for c, v in constraints.items()}

postprocessing

def postprocessing(
    self,
    bounds
)

View Source

    def postprocessing(self, bounds):
        return bounds

preprocessing

def preprocessing(
    self,
    data,
    **kwargs
)

View Source

    def preprocessing(self, data, **kwargs):
        X, y = data
        # Check that X and y have correct shape
        X, y = check_X_y(X, y)
        if np.min(y) > 0:
            print("First ordinal class has index > 0. Shifting index...")
            y = y - np.min(y)
        return X, y

relax_constraint

def relax_constraint(
    self,
    key,
    value
)

View Source

    def relax_constraint(self, key, value):
        return value * (1 + self.get_chosen_relax_factors(key))

relax_factors

def relax_factors(
    cls
)

View Source

    def relax_factors(cls):
        return ["loss_slack", "w_l1_slack"]

Regression

class Regression(
    **kwargs
)

Problem type for regression, based on an epsilon-insensitive SVR model.

View Source
class Regression(ProblemType):

    @classmethod
    def parameters(cls):
        return ["C", "epsilon"]

    @property
    def get_initmodel_template(cls):
        return Regression_SVR

    @property
    def get_cvxproblem_template(cls):
        return Regression_Relevance_Bound

    def relax_factors(cls):
        return ["loss_slack", "w_l1_slack"]

    def preprocessing(self, data, **kwargs):
        X, y = data
        # Check that X and y have correct shape
        X, y = check_X_y(X, y)
        return X, y

Ancestors (in MRO)

  • fri.model.base_type.ProblemType
  • abc.ABC

Static methods

parameters

def parameters()

View Source

    @classmethod
    def parameters(cls):
        return ["C", "epsilon"]

Instance variables

get_cvxproblem_template
get_initmodel_template

Methods

get_all_parameters

def get_all_parameters(
    self
)

View Source

    def get_all_parameters(self):
        return {p: self.get_chosen_parameter(p) for p in self.parameters()}

get_all_relax_factors

def get_all_relax_factors(
    self
)

View Source

    def get_all_relax_factors(self):
        return {p: self.get_chosen_relax_factors(p) for p in self.relax_factors()}

get_chosen_parameter

def get_chosen_parameter(
    self,
    p
)

View Source

    def get_chosen_parameter(self, p):
        try:
            return [
                self.chosen_parameters_[p]
            ]  # We return a list for the parameter search function
        except (KeyError, AttributeError):
            # TODO: rewrite the parameter logic
            # TODO: move this to a subclass
            if p == "scaling_lupi_w":
                # return [0.1, 1, 10, 100, 1000]
                return scipy.stats.reciprocal(a=1e-15, b=1e10)
            # if p == "scaling_lupi_loss":
            #     # a value 0 < p < 1 yields the standard SVM solution,
            #     # p > 1 encourages usage of the LUPI function
            #     return scipy.stats.reciprocal(a=1e-15, b=1e15)
            if p == "C":
                return scipy.stats.reciprocal(a=1e-5, b=1e5)
            if p == "epsilon":
                return [0, 0.001, 0.01, 0.1, 1, 10, 100]
            else:
                return scipy.stats.reciprocal(a=1e-10, b=1e10)

get_chosen_relax_factors

def get_chosen_relax_factors(
    self,
    p
)

View Source

    def get_chosen_relax_factors(self, p):
        try:
            factor = self.relax_factors_[p]
        except KeyError:
            try:
                factor = self.relax_factors_[p + "_slack"]
            except KeyError:
                factor = 0.1
        if factor < 0:
            raise ValueError("Slack factor multiplier must be non-negative!")
        return factor

get_relaxed_constraints

def get_relaxed_constraints(
    self,
    constraints
)

View Source

    def get_relaxed_constraints(self, constraints):
        return {c: self.relax_constraint(c, v) for c, v in constraints.items()}

postprocessing

def postprocessing(
    self,
    bounds
)

View Source

    def postprocessing(self, bounds):
        return bounds

preprocessing

def preprocessing(
    self,
    data,
    **kwargs
)

View Source

    def preprocessing(self, data, **kwargs):
        X, y = data
        # Check that X and y have correct shape
        X, y = check_X_y(X, y)
        return X, y

relax_constraint

def relax_constraint(
    self,
    key,
    value
)

View Source

    def relax_constraint(self, key, value):
        return value * (1 + self.get_chosen_relax_factors(key))

relax_factors

def relax_factors(
    cls
)

View Source

    def relax_factors(cls):
        return ["loss_slack", "w_l1_slack"]