Source code for featuretools.feature_base.feature_base
from woodwork.column_schema import ColumnSchema
from woodwork.logical_types import Boolean, BooleanNullable

from featuretools import primitives
from featuretools.entityset.relationship import Relationship, RelationshipPath
from featuretools.entityset.timedelta import Timedelta
from featuretools.feature_base.utils import is_valid_input
from featuretools.primitives.base import (
    AggregationPrimitive,
    PrimitiveBase,
    TransformPrimitive,
)
from featuretools.utils.wrangle import _check_time_against_column, _check_timedelta

# Registry mapping entityset id -> EntitySet, used to recover the owning
# entityset from a woodwork-initialized dataframe's metadata.
_ES_REF = {}


class FeatureBase(object):
    def __init__(
        self,
        dataframe,
        base_features,
        relationship_path,
        primitive,
        name=None,
        names=None,
    ):
        """Base class for all features

        Args:
            entityset (EntitySet): entityset this feature is being calculated for
            dataframe (DataFrame): dataframe for calculating this feature
            base_features (list[FeatureBase]): list of base features for primitive
            relationship_path (RelationshipPath): path from this dataframe to the
                dataframe of the base features.
            primitive (:class:`.PrimitiveBase`): primitive to calculate. if not
                initialized when passed, gets initialized with no arguments
        """
        assert all(
            isinstance(f, FeatureBase) for f in base_features
        ), "All base features must be features"

        self.dataframe_name = dataframe.ww.name
        # Look up the owning entityset through the module-level registry.
        self.entityset = _ES_REF[dataframe.ww.metadata["entityset_id"]]
        self.base_features = base_features

        # initialize if not already initialized
        if not isinstance(primitive, PrimitiveBase):
            primitive = primitive()
        self.primitive = primitive

        self.relationship_path = relationship_path

        self._name = name
        self._names = names

        assert self._check_input_types(), (
            "Provided inputs don't match input " "type requirements"
        )

    def __getitem__(self, key):
        # Slicing is only meaningful for multi-output features.
        assert (
            self.number_output_features > 1
        ), "can only access slice of multi-output feature"
        assert (
            self.number_output_features > key
        ), "index is higher than the number of outputs"
        return FeatureOutputSlice(self, key)

    @classmethod
    def from_dictionary(cls, arguments, entityset, dependencies, primitive):
        raise NotImplementedError("Must define from_dictionary on FeatureBase subclass")
[docs]defrename(self,name):"""Rename Feature, returns copy. Will reset any custom feature column names to their default value."""feature_copy=self.copy()feature_copy._name=namefeature_copy._names=Nonereturnfeature_copy
defcopy(self):raiseNotImplementedError("Must define copy on FeatureBase subclass")defget_name(self):ifnotself._name:self._name=self.generate_name()returnself._namedefget_feature_names(self):ifnotself._names:ifself.number_output_features==1:self._names=[self.get_name()]else:self._names=self.generate_names()ifself.get_name()!=self.generate_name():self._names=[self.get_name()+"[{}]".format(i)foriinrange(len(self._names))]returnself._namesdefset_feature_names(self,names):"""Set new values for the feature column names, overriding the default values. Number of names provided must match the number of output columns defined for the feature, and all provided names should be unique. Only works for features that have more than one output column. Use ``Feature.rename`` to change the column name for single output features. Args: names (list[str]): List of names to use for the output feature columns. Provided names must be unique. """ifself.number_output_features==1:raiseValueError("The set_feature_names can only be used on features that have more than one output column.",)num_new_names=len(names)ifself.number_output_features!=num_new_names:raiseValueError("Number of names provided must match the number of output features:"f" {num_new_names} name(s) provided, {self.number_output_features} expected.",)iflen(set(names))!=num_new_names:raiseValueError("Provided output feature names must be unique.")self._names=namesdefget_function(self,**kwargs):returnself.primitive.get_function(**kwargs)defget_dependencies(self,deep=False,ignored=None,copy=True):"""Returns features that are used to calculate this feature ..note:: If you only want the features that make up the input to the feature function use the base_features attribute instead. 
"""deps=[]fordinself.base_features[:]:deps+=[d]ifhasattr(self,"where")andself.where:deps+=[self.where]ifignoredisNone:ignored=set([])deps=[dfordindepsifd.unique_name()notinignored]ifdeep:fordepindeps[:]:# copy so we don't modify list we iterate overdeep_deps=dep.get_dependencies(deep,ignored)deps+=deep_depsreturndeps
[docs]defget_depth(self,stop_at=None):"""Returns depth of feature"""max_depth=0stop_at_set=set()ifstop_atisnotNone:stop_at_set=set([i.unique_name()foriinstop_at])ifself.unique_name()instop_at_set:return0fordepinself.get_dependencies(deep=True,ignored=stop_at_set):max_depth=max(dep.get_depth(stop_at=stop_at),max_depth)returnmax_depth+1
def_check_input_types(self):iflen(self.base_features)==0:returnTrueinput_types=self.primitive.input_typesifinput_typesisnotNone:ifnotisinstance(input_types[0],list):input_types=[input_types]fortininput_types:zipped=list(zip(t,self.base_features))ifall([is_valid_input(f.column_schema,t)fort,finzipped]):returnTrueelse:returnTruereturnFalse@propertydefdataframe(self):"""Dataframe this feature belongs too"""returnself.entityset[self.dataframe_name]@propertydefnumber_output_features(self):returnself.primitive.number_output_featuresdef__repr__(self):return"<Feature: %s>"%(self.get_name())defhash(self):returnhash(self.get_name()+self.dataframe_name)def__hash__(self):returnself.hash()@propertydefcolumn_schema(self):feature=selfcolumn_schema=self.primitive.return_typewhilecolumn_schemaisNone:# get column_schema of first base featurebase_feature=feature.base_features[0]column_schema=base_feature.column_schema# only the original time index should exist# so make this feature's return type just a Datetimeif"time_index"incolumn_schema.semantic_tags:column_schema=ColumnSchema(logical_type=column_schema.logical_type,semantic_tags=column_schema.semantic_tags-{"time_index"},)elif"index"incolumn_schema.semantic_tags:column_schema=ColumnSchema(logical_type=column_schema.logical_type,semantic_tags=column_schema.semantic_tags-{"index"},)# Need to add back in the numeric standard tag so the schema can get recognized# as a valid return typeifcolumn_schema.is_numeric:column_schema.semantic_tags.add("numeric")ifcolumn_schema.is_categorical:column_schema.semantic_tags.add("category")# direct features should keep the foreign key tag, but all other features should get 
convertedif(notisinstance(feature,DirectFeature)and"foreign_key"incolumn_schema.semantic_tags):column_schema=ColumnSchema(logical_type=column_schema.logical_type,semantic_tags=column_schema.semantic_tags-{"foreign_key"},)feature=base_featurereturncolumn_schema@propertydefdefault_value(self):returnself.primitive.default_valuedefget_arguments(self):raiseNotImplementedError("Must define get_arguments on FeatureBase subclass")defto_dictionary(self):return{"type":type(self).__name__,"dependencies":[dep.unique_name()fordepinself.get_dependencies()],"arguments":self.get_arguments(),}def_handle_binary_comparison(self,other,Primitive,PrimitiveScalar):ifisinstance(other,FeatureBase):returnFeature([self,other],primitive=Primitive)returnFeature([self],primitive=PrimitiveScalar(other))def__eq__(self,other):"""Compares to other by equality"""returnself._handle_binary_comparison(other,primitives.Equal,primitives.EqualScalar,)def__ne__(self,other):"""Compares to other by non-equality"""returnself._handle_binary_comparison(other,primitives.NotEqual,primitives.NotEqualScalar,)def__gt__(self,other):"""Compares if greater than other"""returnself._handle_binary_comparison(other,primitives.GreaterThan,primitives.GreaterThanScalar,)def__ge__(self,other):"""Compares if greater than or equal to other"""returnself._handle_binary_comparison(other,primitives.GreaterThanEqualTo,primitives.GreaterThanEqualToScalar,)def__lt__(self,other):"""Compares if less than other"""returnself._handle_binary_comparison(other,primitives.LessThan,primitives.LessThanScalar,)def__le__(self,other):"""Compares if less than or equal to other"""returnself._handle_binary_comparison(other,primitives.LessThanEqualTo,primitives.LessThanEqualToScalar,)def__add__(self,other):"""Add other"""returnself._handle_binary_comparison(other,primitives.AddNumeric,primitives.AddNumericScalar,)def__radd__(self,other):returnself.__add__(other)def__sub__(self,other):"""Subtract 
other"""returnself._handle_binary_comparison(other,primitives.SubtractNumeric,primitives.SubtractNumericScalar,)def__rsub__(self,other):returnFeature([self],primitive=primitives.ScalarSubtractNumericFeature(other))def__div__(self,other):"""Divide by other"""returnself._handle_binary_comparison(other,primitives.DivideNumeric,primitives.DivideNumericScalar,)def__truediv__(self,other):returnself.__div__(other)def__rtruediv__(self,other):returnself.__rdiv__(other)def__rdiv__(self,other):returnFeature([self],primitive=primitives.DivideByFeature(other))def__mul__(self,other):"""Multiply by other"""ifisinstance(other,FeatureBase):ifall([isinstance(f.column_schema.logical_type,(Boolean,BooleanNullable))forfin(self,other)],):returnFeature([self,other],primitive=primitives.MultiplyBoolean)if("numeric"inself.column_schema.semantic_tagsandisinstance(other.column_schema.logical_type,(Boolean,BooleanNullable),)or"numeric"inother.column_schema.semantic_tagsandisinstance(self.column_schema.logical_type,(Boolean,BooleanNullable),)):returnFeature([self,other],primitive=primitives.MultiplyNumericBoolean,)returnself._handle_binary_comparison(other,primitives.MultiplyNumeric,primitives.MultiplyNumericScalar,)def__rmul__(self,other):returnself.__mul__(other)def__mod__(self,other):"""Take modulus of other"""returnself._handle_binary_comparison(other,primitives.ModuloNumeric,primitives.ModuloNumericScalar,)def__rmod__(self,other):returnFeature([self],primitive=primitives.ModuloByFeature(other))def__and__(self,other):returnself.AND(other)def__rand__(self,other):returnFeature([other,self],primitive=primitives.And)def__or__(self,other):returnself.OR(other)def__ror__(self,other):returnFeature([other,self],primitive=primitives.Or)def__not__(self,other):returnself.NOT(other)def__abs__(self):returnFeature([self],primitive=primitives.Absolute)def__neg__(self):returnFeature([self],primitive=primitives.Negate)defAND(self,other_feature):"""Logical AND with 
other_feature"""returnFeature([self,other_feature],primitive=primitives.And)defOR(self,other_feature):"""Logical OR with other_feature"""returnFeature([self,other_feature],primitive=primitives.Or)defNOT(self):"""Creates inverse of feature"""returnFeature([self],primitive=primitives.Not)defisin(self,list_of_output):returnFeature([self],primitive=primitives.IsIn(list_of_outputs=list_of_output),)defis_null(self):"""Compares feature to null by equality"""returnFeature([self],primitive=primitives.IsNull)def__invert__(self):returnself.NOT()defunique_name(self):return"%s: %s"%(self.dataframe_name,self.get_name())defrelationship_path_name(self):returnself.relationship_path.nameclassIdentityFeature(FeatureBase):"""Feature for dataframe that is equivalent to underlying column"""def__init__(self,column,name=None):self.column_name=column.ww.nameself.return_type=column.ww.schemametadata=column.ww.schema._metadataes=_ES_REF[metadata["entityset_id"]]super(IdentityFeature,self).__init__(dataframe=es[metadata["dataframe_name"]],base_features=[],relationship_path=RelationshipPath([]),primitive=PrimitiveBase,name=name,)@classmethoddeffrom_dictionary(cls,arguments,entityset,dependencies,primitive):dataframe_name=arguments["dataframe_name"]column_name=arguments["column_name"]column=entityset[dataframe_name].ww[column_name]returncls(column=column,name=arguments["name"])defcopy(self):"""Return copy of feature"""returnIdentityFeature(self.entityset[self.dataframe_name].ww[self.column_name])defgenerate_name(self):returnself.column_namedefget_depth(self,stop_at=None):return0defget_arguments(self):return{"name":self.get_name(),"column_name":self.column_name,"dataframe_name":self.dataframe_name,}@propertydefcolumn_schema(self):returnself.return_typeclassDirectFeature(FeatureBase):"""Feature for child dataframe that inherits a feature value from a parent 
class DirectFeature(FeatureBase):
    """Feature for child dataframe that inherits a feature value from a parent dataframe"""

    input_types = [ColumnSchema()]
    return_type = None

    def __init__(
        self,
        base_feature,
        child_dataframe_name,
        relationship=None,
        name=None,
    ):
        base_feature = _validate_base_features(base_feature)[0]
        self.parent_dataframe_name = base_feature.dataframe_name
        relationship = self._handle_relationship(
            base_feature.entityset,
            child_dataframe_name,
            relationship,
        )
        child_dataframe = base_feature.entityset[child_dataframe_name]
        super(DirectFeature, self).__init__(
            dataframe=child_dataframe,
            base_features=[base_feature],
            relationship_path=RelationshipPath([(True, relationship)]),
            primitive=PrimitiveBase,
            name=name,
        )

    def _handle_relationship(self, entityset, child_dataframe_name, relationship):
        # Validate the given relationship, or infer the unique forward
        # relationship from child to parent when none is provided.
        child_dataframe = entityset[child_dataframe_name]
        if relationship:
            relationship_child = relationship.child_dataframe
            assert (
                child_dataframe.ww.name == relationship_child.ww.name
            ), "child_dataframe must be the relationship child dataframe"

            assert (
                self.parent_dataframe_name == relationship.parent_dataframe.ww.name
            ), "Base feature must be defined on the relationship parent dataframe"
        else:
            child_relationships = entityset.get_forward_relationships(
                child_dataframe.ww.name,
            )
            possible_relationships = (
                r
                for r in child_relationships
                if r.parent_dataframe.ww.name == self.parent_dataframe_name
            )
            relationship = next(possible_relationships, None)

            if not relationship:
                raise RuntimeError(
                    'No relationship from "%s" to "%s" found.'
                    % (child_dataframe.ww.name, self.parent_dataframe_name),
                )

            # Check for another path.
            elif next(possible_relationships, None):
                message = (
                    "There are multiple relationships to the base dataframe. "
                    "You must specify a relationship."
                )
                raise RuntimeError(message)

        return relationship

    @classmethod
    def from_dictionary(cls, arguments, entityset, dependencies, primitive):
        base_feature = dependencies[arguments["base_feature"]]
        relationship = Relationship.from_dictionary(
            arguments["relationship"],
            entityset,
        )
        child_dataframe_name = relationship.child_dataframe.ww.name
        return cls(
            base_feature=base_feature,
            child_dataframe_name=child_dataframe_name,
            relationship=relationship,
            name=arguments["name"],
        )

    @property
    def number_output_features(self):
        return self.base_features[0].number_output_features

    @property
    def default_value(self):
        return self.base_features[0].default_value

    def copy(self):
        """Return copy of feature"""
        _is_forward, relationship = self.relationship_path[0]
        return DirectFeature(
            self.base_features[0],
            self.dataframe_name,
            relationship=relationship,
        )

    @property
    def column_schema(self):
        return self.base_features[0].column_schema

    def generate_name(self):
        return self._name_from_base(self.base_features[0].get_name())

    def generate_names(self):
        return [
            self._name_from_base(base_name)
            for base_name in self.base_features[0].get_feature_names()
        ]

    def get_arguments(self):
        _is_forward, relationship = self.relationship_path[0]
        return {
            "name": self.get_name(),
            "base_feature": self.base_features[0].unique_name(),
            "relationship": relationship.to_dictionary(),
        }

    def _name_from_base(self, base_name):
        # Prefix the base feature's name with the relationship path.
        return "%s.%s" % (self.relationship_path_name(), base_name)
class AggregationFeature(FeatureBase):
    # Feature to condition this feature by in
    # computation (e.g. take the Count of products where the product_id is
    # "basketball".)
    where = None
    #: (str or :class:`.Timedelta`): Use only some amount of previous data from
    # each time point during calculation
    use_previous = None

    def __init__(
        self,
        base_features,
        parent_dataframe_name,
        primitive,
        relationship_path=None,
        use_previous=None,
        where=None,
        name=None,
    ):
        base_features = _validate_base_features(base_features)

        for bf in base_features:
            if bf.number_output_features > 1:
                raise ValueError("Cannot stack on whole multi-output feature.")

        self.child_dataframe_name = base_features[0].dataframe_name
        entityset = base_features[0].entityset
        relationship_path, self._path_is_unique = self._handle_relationship_path(
            entityset,
            parent_dataframe_name,
            relationship_path,
        )

        self.parent_dataframe_name = parent_dataframe_name

        if where is not None:
            self.where = _validate_base_features(where)[0]
            msg = "Where feature must be defined on child dataframe {}".format(
                self.child_dataframe_name,
            )
            assert self.where.dataframe_name == self.child_dataframe_name, msg

        if use_previous:
            assert entityset[self.child_dataframe_name].ww.time_index is not None, (
                "Applying function that requires time index to dataframe that "
                "doesn't have one"
            )
            self.use_previous = _check_timedelta(use_previous)
            assert len(base_features) > 0
            time_index = base_features[0].dataframe.ww.time_index
            time_col = base_features[0].dataframe.ww[time_index]
            assert time_index is not None, (
                "Use previous can only be defined " "on dataframes with a time index"
            )
            assert _check_time_against_column(self.use_previous, time_col)

        super(AggregationFeature, self).__init__(
            dataframe=entityset[parent_dataframe_name],
            base_features=base_features,
            relationship_path=relationship_path,
            primitive=primitive,
            name=name,
        )

    def _handle_relationship_path(
        self,
        entityset,
        parent_dataframe_name,
        relationship_path,
    ):
        # Validate an explicit backward relationship path, or infer the unique
        # one between child and parent dataframes.
        parent_dataframe = entityset[parent_dataframe_name]
        child_dataframe = entityset[self.child_dataframe_name]

        if relationship_path:
            assert all(
                not is_forward for is_forward, _r in relationship_path
            ), "All relationships in path must be backward"

            _is_forward, first_relationship = relationship_path[0]
            first_parent = first_relationship.parent_dataframe
            assert (
                parent_dataframe.ww.name == first_parent.ww.name
            ), "parent_dataframe must match first relationship in path."

            _is_forward, last_relationship = relationship_path[-1]
            assert (
                child_dataframe.ww.name == last_relationship.child_dataframe.ww.name
            ), "Base feature must be defined on the dataframe at the end of relationship_path"

            path_is_unique = entityset.has_unique_forward_path(
                child_dataframe.ww.name,
                parent_dataframe.ww.name,
            )
        else:
            paths = entityset.find_backward_paths(
                parent_dataframe.ww.name,
                child_dataframe.ww.name,
            )
            first_path = next(paths, None)

            if not first_path:
                raise RuntimeError(
                    'No backward path from "%s" to "%s" found.'
                    % (parent_dataframe.ww.name, child_dataframe.ww.name),
                )
            # Check for another path.
            elif next(paths, None):
                message = (
                    "There are multiple possible paths to the base dataframe. "
                    "You must specify a relationship path."
                )
                raise RuntimeError(message)

            relationship_path = RelationshipPath([(False, r) for r in first_path])
            path_is_unique = True

        return relationship_path, path_is_unique

    @classmethod
    def from_dictionary(cls, arguments, entityset, dependencies, primitive):
        base_features = [dependencies[name] for name in arguments["base_features"]]
        relationship_path = [
            Relationship.from_dictionary(r, entityset)
            for r in arguments["relationship_path"]
        ]
        parent_dataframe_name = relationship_path[0].parent_dataframe.ww.name
        relationship_path = RelationshipPath([(False, r) for r in relationship_path])

        use_previous_data = arguments["use_previous"]
        use_previous = use_previous_data and Timedelta.from_dictionary(
            use_previous_data,
        )

        where_name = arguments["where"]
        where = where_name and dependencies[where_name]

        feat = cls(
            base_features=base_features,
            parent_dataframe_name=parent_dataframe_name,
            primitive=primitive,
            relationship_path=relationship_path,
            use_previous=use_previous,
            where=where,
            name=arguments["name"],
        )
        feat._names = arguments.get("feature_names")
        return feat

    def copy(self):
        return AggregationFeature(
            self.base_features,
            parent_dataframe_name=self.parent_dataframe_name,
            relationship_path=self.relationship_path,
            primitive=self.primitive,
            use_previous=self.use_previous,
            where=self.where,
        )

    def _where_str(self):
        if self.where is not None:
            where_str = " WHERE " + self.where.get_name()
        else:
            where_str = ""
        return where_str

    def _use_prev_str(self):
        if self.use_previous is not None and hasattr(self.use_previous, "get_name"):
            use_prev_str = ", Last {}".format(self.use_previous.get_name())
        else:
            use_prev_str = ""
        return use_prev_str

    def generate_name(self):
        return self.primitive.generate_name(
            base_feature_names=[bf.get_name() for bf in self.base_features],
            relationship_path_name=self.relationship_path_name(),
            parent_dataframe_name=self.parent_dataframe_name,
            where_str=self._where_str(),
            use_prev_str=self._use_prev_str(),
        )

    def generate_names(self):
        return self.primitive.generate_names(
            base_feature_names=[bf.get_name() for bf in self.base_features],
            relationship_path_name=self.relationship_path_name(),
            parent_dataframe_name=self.parent_dataframe_name,
            where_str=self._where_str(),
            use_prev_str=self._use_prev_str(),
        )

    def get_arguments(self):
        arg_dict = {
            "name": self.get_name(),
            "base_features": [feat.unique_name() for feat in self.base_features],
            "relationship_path": [r.to_dictionary() for _, r in self.relationship_path],
            "primitive": self.primitive,
            "where": self.where and self.where.unique_name(),
            "use_previous": self.use_previous and self.use_previous.get_arguments(),
        }
        if self.number_output_features > 1:
            arg_dict["feature_names"] = self.get_feature_names()
        return arg_dict

    def relationship_path_name(self):
        # A unique path can be abbreviated to the child dataframe name.
        if self._path_is_unique:
            return self.child_dataframe_name
        else:
            return self.relationship_path.name
class TransformFeature(FeatureBase):
    def __init__(self, base_features, primitive, name=None):
        base_features = _validate_base_features(base_features)

        for bf in base_features:
            if bf.number_output_features > 1:
                raise ValueError("Cannot stack on whole multi-output feature.")

        dataframe = base_features[0].entityset[base_features[0].dataframe_name]
        super(TransformFeature, self).__init__(
            dataframe=dataframe,
            base_features=base_features,
            relationship_path=RelationshipPath([]),
            primitive=primitive,
            name=name,
        )

    @classmethod
    def from_dictionary(cls, arguments, entityset, dependencies, primitive):
        base_features = [dependencies[name] for name in arguments["base_features"]]
        feat = cls(
            base_features=base_features,
            primitive=primitive,
            name=arguments["name"],
        )
        feat._names = arguments.get("feature_names")
        return feat

    def copy(self):
        return TransformFeature(self.base_features, self.primitive)

    def generate_name(self):
        return self.primitive.generate_name(
            base_feature_names=[bf.get_name() for bf in self.base_features],
        )

    def generate_names(self):
        return self.primitive.generate_names(
            base_feature_names=[bf.get_name() for bf in self.base_features],
        )

    def get_arguments(self):
        arg_dict = {
            "name": self.get_name(),
            "base_features": [feat.unique_name() for feat in self.base_features],
            "primitive": self.primitive,
        }
        if self.number_output_features > 1:
            arg_dict["feature_names"] = self.get_feature_names()
        return arg_dict


class GroupByTransformFeature(TransformFeature):
    def __init__(self, base_features, primitive, groupby, name=None):
        if not isinstance(groupby, FeatureBase):
            groupby = IdentityFeature(groupby)
        # groupby must carry at least one of the category/foreign_key tags.
        assert (
            len({"category", "foreign_key"} - groupby.column_schema.semantic_tags) < 2
        )
        self.groupby = groupby

        base_features = _validate_base_features(base_features)
        # The groupby feature rides along as the last base feature.
        base_features.append(groupby)

        super(GroupByTransformFeature, self).__init__(
            base_features=base_features,
            primitive=primitive,
            name=name,
        )

    @classmethod
    def from_dictionary(cls, arguments, entityset, dependencies, primitive):
        base_features = [dependencies[name] for name in arguments["base_features"]]
        groupby = dependencies[arguments["groupby"]]
        feat = cls(
            base_features=base_features,
            primitive=primitive,
            groupby=groupby,
            name=arguments["name"],
        )
        feat._names = arguments.get("feature_names")
        return feat

    def copy(self):
        # the groupby feature is appended to base_features in the __init__
        # so here we separate them again
        return GroupByTransformFeature(
            self.base_features[:-1],
            self.primitive,
            self.groupby,
        )

    def generate_name(self):
        # exclude the groupby feature from base_names since it has a special
        # place in the feature name
        base_names = [bf.get_name() for bf in self.base_features[:-1]]
        _name = self.primitive.generate_name(base_names)
        return "{} by {}".format(_name, self.groupby.get_name())

    def generate_names(self):
        base_names = [bf.get_name() for bf in self.base_features[:-1]]
        _names = self.primitive.generate_names(base_names)
        names = [name + " by {}".format(self.groupby.get_name()) for name in _names]
        return names

    def get_arguments(self):
        # Do not include groupby in base_features.
        feature_names = [
            feat.unique_name()
            for feat in self.base_features
            if feat.unique_name() != self.groupby.unique_name()
        ]
        arg_dict = {
            "name": self.get_name(),
            "base_features": feature_names,
            "primitive": self.primitive,
            "groupby": self.groupby.unique_name(),
        }
        if self.number_output_features > 1:
            arg_dict["feature_names"] = self.get_feature_names()
        return arg_dict
class Feature(object):
    """
    Alias to create feature. Infers the feature type based on init parameters.
    """

    def __new__(
        self,
        base,
        dataframe_name=None,
        groupby=None,
        parent_dataframe_name=None,
        primitive=None,
        use_previous=None,
        where=None,
    ):
        # either direct or identity
        if primitive is None and dataframe_name is None:
            return IdentityFeature(base)
        elif primitive is None and dataframe_name is not None:
            return DirectFeature(base, dataframe_name)
        elif primitive is not None and parent_dataframe_name is not None:
            assert isinstance(primitive, AggregationPrimitive) or issubclass(
                primitive,
                AggregationPrimitive,
            )
            return AggregationFeature(
                base,
                parent_dataframe_name=parent_dataframe_name,
                use_previous=use_previous,
                where=where,
                primitive=primitive,
            )
        elif primitive is not None:
            assert isinstance(primitive, TransformPrimitive) or issubclass(
                primitive,
                TransformPrimitive,
            )
            if groupby is not None:
                return GroupByTransformFeature(
                    base,
                    primitive=primitive,
                    groupby=groupby,
                )
            return TransformFeature(base, primitive=primitive)

        raise Exception("Unrecognized feature initialization")


class FeatureOutputSlice(FeatureBase):
    """
    Class to access specific multi output feature column
    """

    def __init__(self, base_feature, n, name=None):
        base_features = [base_feature]
        self.num_output_parent = base_feature.number_output_features

        msg = "cannot access slice from single output feature"
        assert self.num_output_parent > 1, msg
        msg = "cannot access column that is not between 0 and " + str(
            self.num_output_parent - 1,
        )
        assert n < self.num_output_parent, msg

        self.n = n
        self._name = name
        self._names = [name] if name else None
        self.base_features = base_features
        self.base_feature = base_features[0]

        # Mirror the parent feature's identifying attributes rather than
        # calling FeatureBase.__init__.
        self.dataframe_name = base_feature.dataframe_name
        self.entityset = base_feature.entityset
        self.primitive = base_feature.primitive

        self.relationship_path = base_feature.relationship_path

    def __getitem__(self, key):
        raise ValueError("Cannot get item from slice of multi output feature")

    def generate_name(self):
        return self.base_feature.get_feature_names()[self.n]

    @property
    def number_output_features(self):
        return 1

    def get_arguments(self):
        return {
            "name": self.get_name(),
            "base_feature": self.base_feature.unique_name(),
            "n": self.n,
        }

    @classmethod
    def from_dictionary(cls, arguments, entityset, dependencies, primitive):
        base_feature_name = arguments["base_feature"]
        base_feature = dependencies[base_feature_name]
        n = arguments["n"]
        name = arguments["name"]
        return cls(base_feature=base_feature, n=n, name=name)

    def copy(self):
        return FeatureOutputSlice(self.base_feature, self.n)


def _validate_base_features(feature):
    # Normalize a feature / column / iterable of either into a list of
    # FeatureBase instances.
    if "Series" == type(feature).__name__:
        return [IdentityFeature(feature)]
    elif hasattr(feature, "__iter__"):
        features = [_validate_base_features(f)[0] for f in feature]
        msg = "all base features must share the same dataframe"
        assert len(set([bf.dataframe_name for bf in features])) == 1, msg
        return features
    elif isinstance(feature, FeatureBase):
        return [feature]
    else:
        raise Exception("Not a feature")